===== capitalone_repos/edgetest/edgetest/core.py =====
"""Core module."""
import json
import shlex
from pathlib import Path
from subprocess import Popen
from typing import Dict, List, Optional
from pluggy._hooks import _HookRelay
from .logger import get_logger
from .utils import _isin_case_dashhyphen_ins, _run_command, pushd
LOG = get_logger(__name__)
class TestPackage:
"""Run test commands with bleeding edge dependencies.
Parameters
----------
hook : _HookRelay
The hook object from ``pluggy``.
envname : str
        The name of the virtual environment to create/use.
upgrade : list
        The list of packages to upgrade.
package_dir : str, optional (default None)
The location of the local package to install and test.
Attributes
----------
_basedir : str
The base directory location for each environment
status : bool
A boolean status indicator for whether or not the tests passed. Only populated
after ``run_tests`` has been executed.
"""
def __init__(
self,
hook: _HookRelay,
envname: str,
upgrade: List[str],
package_dir: Optional[str] = None,
):
"""Init method."""
self.hook = hook
self._basedir = Path(Path.cwd(), ".edgetest")
self._basedir.mkdir(exist_ok=True)
self.envname = envname
self.upgrade = upgrade
self.package_dir = package_dir or "."
self.status: bool = False
@property
def python_path(self) -> str:
"""Get the path to the python executable.
Returns
-------
str
The path to the python executable.
"""
return self.hook.path_to_python(basedir=self._basedir, envname=self.envname) # type: ignore
def setup(
self,
extras: Optional[List[str]] = None,
deps: Optional[List[str]] = None,
**options,
) -> None:
"""Set up the testing environment.
Parameters
----------
extras : list, optional (default None)
The list of extra installations to include.
deps : list, optional (default None)
A list of additional dependencies to install via ``pip``
**options
Additional options for ``self.hook.create_environment``.
Returns
-------
None
Raises
------
RuntimeError
This error will be raised if any part of the set up process fails.
"""
        # Create the virtual environment via the plugin hook
LOG.info(f"Creating the following environment: {self.envname}...")
self.hook.create_environment(
basedir=self._basedir, envname=self.envname, conf=options
)
LOG.info(f"Successfully created {self.envname}")
# Install the local package
with pushd(self.package_dir):
pkg = "."
if extras:
pkg += f"[{', '.join(extras)}]"
if deps:
LOG.info(
f"Installing specified additional dependencies into {self.envname}..."
)
split = [shlex.split(dep) for dep in deps]
_run_command(
self.python_path,
"-m",
"pip",
"install",
*[itm for lst in split for itm in lst],
)
LOG.info(f"Installing the local package into {self.envname}...")
_run_command(self.python_path, "-m", "pip", "install", pkg)
LOG.info(f"Successfully installed the local package into {self.envname}...")
# Upgrade package(s)
LOG.info(
f"Upgrading the following packages in {self.envname}: {', '.join(self.upgrade)}"
)
self.hook.run_update(
basedir=self._basedir,
envname=self.envname,
upgrade=self.upgrade,
conf=options,
)
LOG.info(f"Successfully upgraded packages in {self.envname}")
def upgraded_packages(self) -> List[Dict[str, str]]:
"""Get the list of upgraded packages for the test environment.
Parameters
----------
None
Returns
-------
List
The output of ``pip list --format json``, filtered to the packages upgraded
for this environment.
"""
# Get the version for the upgraded package(s)
out, _ = _run_command(self.python_path, "-m", "pip", "list", "--format", "json")
outjson = json.loads(out)
upgrade_wo_extras = [pkg.split("[")[0] for pkg in self.upgrade]
return [
pkg
for pkg in outjson
if _isin_case_dashhyphen_ins(pkg.get("name", ""), upgrade_wo_extras)
]
def run_tests(self, command: str) -> int:
"""Run the tests in the package directory.
Parameters
----------
command : str
The test command
Returns
-------
int
The exit code
"""
with pushd(self.package_dir):
popen = Popen(
(self.python_path, "-m", *shlex.split(command)), universal_newlines=True
)
popen.communicate()
self.status = bool(popen.returncode == 0)
return popen.returncode
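# Usage sketch (illustrative, not part of the module): driving TestPackage
# directly with the hook relay from a ``pluggy`` plugin manager ``pm``
# (see ``interface.get_plugin_manager``).
#
#   tester = TestPackage(hook=pm.hook, envname="pandas", upgrade=["pandas"])
#   tester.setup(extras=["tests"])
#   tester.run_tests("pytest tests")
#   print(tester.status, tester.upgraded_packages())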

===== capitalone_repos/edgetest/edgetest/report.py =====
"""Generate rST reports."""
from typing import Any, List
from tabulate import tabulate
from .core import TestPackage
VALID_OUTPUTS = ["rst", "github"]
def gen_report(testers: List[TestPackage], output_type: str = "rst") -> Any:
    """Generate an rST or GitHub-flavored report.
Parameters
----------
testers : list
A list of ``TestPackage`` objects.
output_type : str
        A valid output type: ``rst`` or ``github``.
Returns
-------
Any
The report.
"""
if output_type not in VALID_OUTPUTS:
raise ValueError(f"Invalid output_type provided: {output_type}")
headers = ["Environment", "Passing tests", "Upgraded packages", "Package version"]
rows: List[List] = []
for env in testers:
upgraded = env.upgraded_packages()
for pkg in upgraded:
rows.append([env.envname, env.status, pkg["name"], pkg["version"]])
return tabulate(rows, headers=headers, tablefmt=output_type)
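# Example (illustrative): render the results for a list of testers as a
# GitHub-flavored Markdown table.
#
#   print(gen_report(testers, output_type="github"))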

===== capitalone_repos/edgetest/edgetest/hookspecs.py =====
"""Hook specifications for edgetest."""
from typing import Dict, List
import pluggy
from .schema import Schema
hookspec = pluggy.HookspecMarker("edgetest")
@hookspec
def addoption(schema: Schema):
"""Modify the schema for custom options.
You can add environment-level options through ``add_envoption`` or global
configuration options through ``add_globaloption``.
"""
@hookspec
def pre_run_hook(conf: Dict):
"""Pre-setup and test hook.
Parameters
----------
conf : Dict
The entire configuration dictionary.
"""
@hookspec(firstresult=True)
def path_to_python(basedir: str, envname: str) -> str:
"""Return the path to the python executable.
Parameters
----------
basedir : str
The base directory location for the environment.
envname : str
The name of the virtual environment.
Returns
-------
str
The path to the python executable for the environment. For installations
via ``pip``, we'll be running ``python -m pip install ...``, where ``python``
is the python executable for the environment.
"""
@hookspec(firstresult=True)
def create_environment(basedir: str, envname: str, conf: Dict):
"""Create the virtual environment for testing.
Parameters
----------
basedir : str
The base directory location for the environment.
envname : str
The name of the virtual environment.
conf : dict
The configuration dictionary for the environment. This is useful if you
want to add configuration arguments for additional dependencies that can
only be installed through the environment manager (e.g. Conda).
Raises
------
RuntimeError
Error raised if the environment cannot be created.
"""
@hookspec(firstresult=True)
def run_update(basedir: str, envname: str, upgrade: List, conf: Dict):
"""Update packages from upgrade list.
Parameters
----------
basedir : str
The base directory location for the environment.
envname : str
The name of the virtual environment.
upgrade : list
        The list of packages to upgrade.
conf : dict
The configuration dictionary for the environment. This is useful if you
want to add configuration arguments for additional dependencies that can
only be installed through the environment manager (e.g. Conda).
Raises
------
RuntimeError
Error raised if the packages cannot be updated.
"""
@hookspec
def post_run_hook(testers: List, conf: Dict):
"""Post testing hook.
For executing code after the environment set up and testing.
Parameters
----------
testers : list
The list of ``TestPackage`` objects.
conf : dict
The entire configuration dictionary.
"""

===== capitalone_repos/edgetest/edgetest/schema.py =====
"""Define the Cerberus schema for the testing configuration."""
from typing import Dict, List
from cerberus import Validator
BASE_SCHEMA = {
"envs": {
"type": "list",
"required": True,
"schema": {
"type": "dict",
"schema": {
"name": {"type": "string", "coerce": "strip", "required": True},
"upgrade": {
"type": "list",
"schema": {
"type": "string",
},
"coerce": "listify",
"required": True,
},
"extras": {
"type": "list",
"schema": {"type": "string"},
"coerce": "listify",
"default": None,
"nullable": True,
},
"deps": {
"type": "list",
"schema": {"type": "string"},
"coerce": "listify",
"default": None,
"nullable": True,
},
"command": {"type": "string", "coerce": "strip", "default": "pytest"},
"package_dir": {"type": "string", "coerce": "strip", "default": "."},
},
},
}
}
class Schema:
"""An editable schema."""
schema = BASE_SCHEMA
def add_envoption(self, option: str, schema: Dict):
"""Add an environment-level option.
Parameters
----------
option : str
The name of the option. This will be the key in the key-value pair.
schema : dict
The schema for the option.
Examples
--------
>>> Schema().add_envoption("command", {"type": "string", "default": "pytest"})
"""
self.schema["envs"]["schema"]["schema"][option] = schema # type: ignore
def add_globaloption(self, option: str, schema: Dict):
"""Add a global option.
Parameters
----------
option : str
The name of the option. This will be the key in the key-value pair.
schema : dict
The schema for the option.
Examples
--------
>>> Schema().add_globaloption("print_message", {"type": "string"})
"""
self.schema[option] = schema
class EdgetestValidator(Validator):
"""Custom validator for coercing lists from ``.ini`` style files."""
def _normalize_coerce_listify(self, value: str) -> List:
"""Coerce a value into a list.
Parameters
----------
value : str
The original value for the field.
Returns
-------
list
The newline-separated list.
"""
if isinstance(value, str):
return value.strip().splitlines()
else:
return value
def _normalize_coerce_strip(self, value: str) -> str:
"""Remove leading and trailing spaces.
Parameters
----------
value : str
The original value for the field.
Returns
-------
str
The stripped string.
"""
return value.strip()
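# Example (illustrative): the coercion rules normalize ``.ini``-style values
# before validation, turning newline-separated strings into lists and
# stripping whitespace from strings.
#
#   validator = EdgetestValidator(schema=Schema().schema)
#   conf = {"envs": [{"name": " pandas ", "upgrade": "pandas\nnumpy"}]}
#   validator.validate(conf)                   # True
#   validator.document["envs"][0]["name"]      # "pandas"
#   validator.document["envs"][0]["upgrade"]   # ["pandas", "numpy"]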

===== capitalone_repos/edgetest/edgetest/interface.py =====
"""Command-line interface."""
from pathlib import Path
from typing import List
import click
import pluggy
from tomlkit import dumps
from . import hookspecs, lib
from .core import TestPackage
from .logger import get_logger
from .report import gen_report
from .schema import EdgetestValidator, Schema
from .utils import (
gen_requirements_config,
parse_cfg,
parse_toml,
upgrade_pyproject_toml,
upgrade_requirements,
upgrade_setup_cfg,
)
LOG = get_logger(__name__)
def get_plugin_manager() -> pluggy.PluginManager:
"""Get the plugin manager.
Registers the default ``venv`` plugin.
Returns
-------
PluginManager
The plugin manager.
"""
pm = pluggy.PluginManager("edgetest")
pm.add_hookspecs(hookspecs)
pm.load_setuptools_entrypoints("edgetest")
pm.register(lib)
return pm
@click.command()
@click.option(
"--config",
"-c",
default=None,
type=click.Path(exists=True),
help="Path to the test configuration file",
)
@click.option(
"--requirements",
"-r",
default="requirements.txt",
type=click.Path(),
help="Path to a requirements file",
)
@click.option(
"--environment",
"-e",
default=None,
help="Name of a specific environment to run",
)
@click.option(
"--notest",
is_flag=True,
    help="Skip running the test command in each environment",
)
@click.option(
"--nosetup",
is_flag=True,
    help="Skip environment setup and use the existing environment(s)",
)
@click.option(
"--extras",
type=str,
multiple=True,
default=None,
help="List of extra installations for the local package. Only used if using ``requirements``",
)
@click.option(
"--deps",
"-d",
type=str,
multiple=True,
default=None,
help="Additional `pip` dependencies to install. Only used if using ``requirements``.",
)
@click.option(
"--command",
type=str,
default="pytest",
help="The test command to use in each environment. Only used if using ``requirements``.",
)
@click.option(
"--export",
is_flag=True,
help="Whether or not to export the updated requirements file. Overwrites input requirements.",
)
def cli(
config,
requirements,
environment,
notest,
nosetup,
extras,
deps,
command,
export,
):
"""Create the environments and test.
If you do not supply a configuration file, this package will search for a
    ``requirements.txt`` file and create a virtual environment for each package in that file.
"""
# Get the hooks
pm = get_plugin_manager()
if config and Path(config).suffix == ".cfg":
conf = parse_cfg(filename=config, requirements=requirements)
elif config and Path(config).suffix == ".toml":
conf = parse_toml(filename=config, requirements=requirements)
else:
# Find the path to the local directory using the requirements file
conf = gen_requirements_config(
fname_or_buf=requirements,
extras=extras,
deps=deps,
command=command,
package_dir=str(Path(requirements).parent),
)
# Validate the configuration file
docstructure = Schema()
pm.hook.addoption(schema=docstructure)
validator = EdgetestValidator(schema=docstructure.schema)
if not validator.validate(conf):
click.echo(f"Unable to validate configuration file. Error: {validator.errors}")
raise ValueError("Unable to validate configuration file.")
conf = validator.document
if environment:
conf["envs"] = [env for env in conf["envs"] if env["name"] == environment]
# Run the pre-test hook
pm.hook.pre_run_hook(conf=conf)
testers: List[TestPackage] = []
for env in conf["envs"]:
testers.append(
TestPackage(
hook=pm.hook,
envname=env["name"],
upgrade=env["upgrade"],
package_dir=env["package_dir"],
)
)
# Set up the test environment
if nosetup:
click.echo(f"Using existing environment for {env['name']}...")
else:
testers[-1].setup(**env)
# Run the tests
if notest:
click.echo(f"Skipping tests for {env['name']}")
else:
testers[-1].run_tests(env["command"])
report = gen_report(testers)
click.echo(f"\n\n{report}")
if export and testers[-1].status:
if config is not None and Path(config).name == "setup.cfg":
parser = upgrade_setup_cfg(
upgraded_packages=testers[-1].upgraded_packages(),
filename=config,
)
with open(config, "w") as outfile:
parser.write(outfile)
            if "options" not in parser or not parser.get("options", "install_requires", fallback=None):
click.echo(
"No PEP-517 style requirements in ``setup.cfg`` to update. Updating "
f"{requirements}"
)
upgraded = upgrade_requirements(
fname_or_buf=requirements,
upgraded_packages=testers[-1].upgraded_packages(),
)
with open(requirements, "w") as outfile:
outfile.write(upgraded)
elif config is not None and Path(config).name == "pyproject.toml":
parser = upgrade_pyproject_toml(
upgraded_packages=testers[-1].upgraded_packages(),
filename=config,
)
with open(config, "w") as outfile:
outfile.write(dumps(parser))
if "project" not in parser or not parser.get("project").get("dependencies"):
click.echo(
"No dependencies in ``pyproject.toml`` to update. Updating "
f"{requirements}"
)
upgraded = upgrade_requirements(
fname_or_buf=requirements,
upgraded_packages=testers[-1].upgraded_packages(),
)
with open(requirements, "w") as outfile:
outfile.write(upgraded)
else:
click.echo(f"Overwriting the requirements file {requirements}...")
upgraded = upgrade_requirements(
fname_or_buf=requirements,
upgraded_packages=testers[-1].upgraded_packages(),
)
with open(requirements, "w") as outfile:
outfile.write(upgraded)
# Run the post-test hook
pm.hook.post_run_hook(testers=testers, conf=conf)
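# Example invocations (illustrative), using the options defined above:
#
#   edgetest --config setup.cfg --export
#   edgetest -r requirements.txt --extras tests --command "pytest tests"
#   edgetest -c pyproject.toml --environment pandas --notest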

===== capitalone_repos/edgetest/edgetest/lib.py =====
"""Default virtual environment hook."""
import platform
from pathlib import Path
from typing import Dict, List
from venv import EnvBuilder
import pluggy
from .utils import _run_command
hookimpl = pluggy.HookimplMarker("edgetest")
@hookimpl(trylast=True)
def path_to_python(basedir: str, envname: str) -> str:
"""Return the path to the python executable."""
if platform.system() == "Windows":
return str(Path(basedir) / envname / "Scripts" / "python")
else:
return str(Path(basedir) / envname / "bin" / "python")
@hookimpl(trylast=True)
def create_environment(basedir: str, envname: str, conf: Dict):
"""Create the virtual environment for testing.
Creates an environment using ``venv``.
Parameters
----------
basedir : str
The base directory location for the environment.
envname : str
The name of the virtual environment.
conf : dict
Ignored.
Raises
------
RuntimeError
Error raised if the environment cannot be created.
"""
builder = EnvBuilder(with_pip=True)
try:
builder.create(env_dir=Path(basedir, envname))
    except Exception as exc:
        raise RuntimeError(f"Unable to create {envname} in {basedir}") from exc
@hookimpl(trylast=True)
def run_update(basedir: str, envname: str, upgrade: List, conf: Dict):
"""Update packages from upgrade list.
Parameters
----------
basedir : str
The base directory location for the environment.
envname : str
The name of the virtual environment.
upgrade : list
        The list of packages to upgrade.
conf : dict
Ignored.
Raises
------
RuntimeError
Error raised if the packages cannot be updated.
"""
python_path = path_to_python(basedir, envname)
try:
_run_command(
python_path,
"-m",
"pip",
"install",
*upgrade,
"--upgrade",
)
    except Exception as exc:
        raise RuntimeError(f"Unable to pip upgrade: {upgrade}") from exc
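# Example (illustrative): how the default hooks fit together for an
# environment named "pandas" under the ".edgetest" base directory.
#
#   create_environment(basedir=".edgetest", envname="pandas", conf={})
#   path_to_python(".edgetest", "pandas")
#   # -> ".edgetest/pandas/bin/python" on POSIX
#   # -> ".edgetest\pandas\Scripts\python" on Windows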

===== capitalone_repos/edgetest/edgetest/utils.py =====
"""Utility functions."""
import os
from configparser import ConfigParser
from contextlib import contextmanager
from copy import deepcopy
from pathlib import Path
from subprocess import PIPE, Popen
from typing import Any, Dict, List, Optional, Tuple, Union
from packaging.specifiers import SpecifierSet
from pkg_resources import parse_requirements
from tomlkit import TOMLDocument, load
from tomlkit.container import Container
from tomlkit.items import Array, Item, String, Table
from .logger import get_logger
LOG = get_logger(__name__)
def _run_command(*args) -> Tuple[str, int]:
"""Run a command using ``subprocess.Popen``.
Parameters
----------
*args
Arguments for the command.
Returns
-------
str
The output
int
The exit code
Raises
------
RuntimeError
Error raised when the command is not successfully executed.
"""
LOG.debug(f"Running the following command: \n\n {' '.join(args)}")
popen = Popen(args, stdout=PIPE, universal_newlines=True)
out, _ = popen.communicate()
if popen.returncode:
raise RuntimeError(
f"Unable to run the following command: \n\n {' '.join(args)}"
)
return out, popen.returncode
@contextmanager
def pushd(new_dir: str):
"""Create a context manager for running commands in sub-directories.
Parameters
----------
new_dir : str
The relative directory to run the command in.
"""
curr_dir = Path.cwd()
os.chdir(curr_dir / new_dir)
try:
yield
finally:
os.chdir(curr_dir)
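# Example (illustrative): commands inside the block run in ./subpackage, and
# the original working directory is restored afterward, even on error.
#
#   with pushd("subpackage"):
#       _run_command("python", "-m", "pip", "install", ".")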
def _convert_toml_array_to_string(item: Union[Item, Any]) -> str:
    """Convert a TOML array (or TOML string) to a newline-separated string."""
    if isinstance(item, Array):
        return "\n".join(item)
    elif isinstance(item, String):
        return str(item)
    else:
        raise ValueError(f"Cannot convert item of type {type(item)} to a string")
def convert_requirements(requirements: str, conf: Optional[Dict] = None) -> Dict:
    """Generate environments for a newline-separated list of package requirements.
This function will generate one environment per entry with an additional environment
that upgrades all requirements simultaneously.
Parameters
----------
requirements : str
The requirements string.
conf : dict, optional (default None)
An existing configuration to edit.
Returns
-------
Dict
A configuration dictionary.
"""
conf = {"envs": []} if conf is None else conf
pkgs = [pkg.project_name for pkg in parse_requirements(requirements)]
for pkg in pkgs:
conf["envs"].append({})
conf["envs"][-1]["name"] = pkg
conf["envs"][-1]["upgrade"] = pkg
# Create an environment with all requirements upgraded
conf["envs"].append({})
conf["envs"][-1]["name"] = "all-requirements"
conf["envs"][-1]["upgrade"] = "\n".join(pkgs)
return conf
def gen_requirements_config(fname_or_buf: str, **options) -> Dict:
"""Generate a configuration file from package requirements.
This function will convert the package installation requirements to a configuration
file with one environment per requirement.
Parameters
----------
fname_or_buf : str
Path to the requirements file to parse using ``pkg_resources.parse_requirements``
or the string representing the requirements file.
**options
Options to apply to each test environment.
Returns
-------
Dict
The configuration file.
"""
# First, get the requirements
if Path(fname_or_buf).is_file():
with open(fname_or_buf) as infile:
cfg = infile.read()
else:
cfg = fname_or_buf
output = convert_requirements(requirements=cfg)
for index in range(len(output["envs"])):
output["envs"][index].update(options)
return output
def parse_cfg(filename: str = "setup.cfg", requirements: Optional[str] = None) -> Dict:
"""Generate a configuration from a ``.ini`` style file.
This function can operate in two ways. First, it can look for sections that
start with ``edgetest`` and build a configuration. Suppose
you have ``setup.cfg`` as follows:
.. code-block:: ini
[edgetest.envs.pandas]
upgrade =
pandas
This will result in a configuration that has one testing environment, named
``pandas``, that upgrades the ``pandas`` package.
If you don't have any sections that start with ``edgetest.envs``, we will look for
the PEP 517-style ``setup.cfg`` install requirements (the ``install_requires`` key
    within the ``options`` section). To set global defaults for your environments, use
the ``edgetest`` section:
.. code-block:: ini
[edgetest]
extras =
tests
command =
pytest tests -m "not integration"
[edgetest.envs.pandas]
upgrade =
pandas
For this single environment file, the above configuration is equivalent to
.. code-block:: ini
[edgetest.envs.pandas]
extras =
tests
command =
pytest tests -m "not integration"
upgrade =
pandas
Parameters
----------
filename : str, optional (default "setup.cfg")
The name of the configuration file to read. Defaults to ``setup.cfg``.
requirements : str, optional (default None)
An optional path to the requirements text file. If there are no PEP-517
style dependencies or coded environments in the edgetest configuration, this
function will look for dependencies in the requirements file.
Returns
-------
Dict
A configuration dictionary for ``edgetest``.
"""
# Read in the configuration file
config = ConfigParser()
config.read(filename)
# Parse
output: Dict = {"envs": []}
# Get any global options if necessary
options = dict(config["edgetest"]) if "edgetest" in config else {}
# Next, create the sections
for section in config.sections():
if not section.startswith("edgetest."):
continue
# Look for the special ``envs`` key
section_name = section.split(".")
if section_name[1] == "envs":
output["envs"].append(dict(config[section]))
output["envs"][-1]["name"] = section_name[2]
else:
output[section_name[1]] = dict(config[section])
if len(output["envs"]) == 0:
        if config.get("options", "install_requires", fallback=None):
output = convert_requirements(
requirements=config["options"]["install_requires"], conf=output
)
elif requirements:
req_conf = gen_requirements_config(fname_or_buf=requirements)
output["envs"] = req_conf["envs"]
else:
raise ValueError("Please supply a valid list of environments to create.")
# Apply global environment options (without overwriting)
for idx in range(len(output["envs"])):
output["envs"][idx] = dict(
list(options.items()) + list(output["envs"][idx].items())
)
return output
def parse_toml(
filename: str = "pyproject.toml", requirements: Optional[str] = None
) -> Dict:
"""Generate a configuration from a ``.toml`` style file.
This function can operate in two ways. First, it will look for tables that
start with ``edgetest`` and build a configuration. Suppose
you have ``pyproject.toml`` as follows:
.. code-block:: toml
[edgetest.envs.pandas]
upgrade = [
"pandas"
]
This will result in a configuration that has one testing environment, named
``pandas``, that upgrades the ``pandas`` package.
If you don't have any tables that start with ``edgetest.envs``, we will look for
the installation requirements (the ``dependencies`` key within the ``project`` section).
    To set global defaults for your environments, use the ``edgetest`` table:
.. code-block:: toml
[edgetest]
extras = [
"tests"
]
command = "pytest tests -m 'not integration'"
[edgetest.envs.pandas]
upgrade = [
"pandas"
]
For this single environment file, the above configuration is equivalent to
.. code-block:: toml
[edgetest.envs.pandas]
extras = [
"tests"
]
command = "pytest tests -m 'not integration'"
upgrade = [
"pandas"
]
Parameters
----------
filename : str, optional (default "pyproject.toml")
The name of the toml file to read. Defaults to ``pyproject.toml``.
requirements : str, optional (default None)
An optional path to the requirements text file. If there are no TOML
style dependencies or coded environments in the edgetest configuration, this
function will look for dependencies in the requirements file.
Returns
-------
Dict
A configuration dictionary for ``edgetest``.
"""
options: Union[Item, Container, dict]
# Read in the configuration file
config: TOMLDocument = load(open(filename))
# Parse
output: Dict = {"envs": []}
# Get any global options if necessary. First scan through and pop out any Tables
temp_config = deepcopy(config)
if "edgetest" in config:
for j in config["edgetest"].items(): # type: ignore
if isinstance(config["edgetest"][j[0]], Table): # type: ignore
_ = temp_config["edgetest"].pop( # type: ignore
j[0], None
) # remove Tables from the temp config
else:
temp_config["edgetest"][j[0]] = _convert_toml_array_to_string( # type: ignore
temp_config["edgetest"][j[0]] # type: ignore
)
options = temp_config["edgetest"]
else:
options = {}
# Check envs exists and any other Tables
if "edgetest" in config:
for section in config["edgetest"]: # type: ignore
if section == "envs":
for env in config["edgetest"]["envs"]: # type: ignore
for item in config["edgetest"]["envs"][env]: # type: ignore
# If an Array then decompose to a string format
config["edgetest"]["envs"][env][ # type: ignore
item
] = _convert_toml_array_to_string(
config["edgetest"]["envs"][env][item] # type: ignore
)
output["envs"].append(dict(config["edgetest"]["envs"][env])) # type: ignore
output["envs"][-1]["name"] = env
elif isinstance(config["edgetest"][section], Table): # type: ignore
output[section] = dict(config["edgetest"][section]) # type: ignore
if len(output["envs"]) == 0:
        if config.get("project", {}).get("dependencies"):  # type: ignore
output = convert_requirements(
requirements="\n".join(config["project"]["dependencies"]), conf=output # type: ignore # noqa: E501
)
elif requirements:
req_conf = gen_requirements_config(fname_or_buf=requirements)
output["envs"] = req_conf["envs"]
else:
raise ValueError("Please supply a valid list of environments to create.")
# Apply global environment options (without overwriting)
for idx in range(len(output["envs"])):
output["envs"][idx] = dict(
list(options.items()) + list(output["envs"][idx].items()) # type: ignore
)
return output
def upgrade_requirements(
fname_or_buf: str, upgraded_packages: List[Dict[str, str]]
) -> str:
"""Create an upgraded requirements file.
Parameters
----------
fname_or_buf : str
Path to the requirements file to parse using ``pkg_resources.parse_requirements``
or the string representing the requirements file.
upgraded_packages : list
A list of packages upgraded in the testing procedure.
Returns
-------
str
The string file representing the new requirements file.
"""
# Get the existing file
try:
if Path(fname_or_buf).is_file():
with open(fname_or_buf) as infile:
cfg = infile.read()
else:
cfg = fname_or_buf
except OSError:
# Filename too long for the is_file() function
cfg = fname_or_buf
pkgs = [pkg for pkg in parse_requirements(cfg)]
upgrades = {pkg["name"]: pkg["version"] for pkg in upgraded_packages}
for pkg in pkgs:
if pkg.project_name not in upgrades:
continue
# Replace the spec
specs = deepcopy(pkg.specs)
for index, value in enumerate(specs):
if value[0] == "<=":
pkg.specs[index] = ("<=", upgrades[pkg.project_name])
elif value[0] == "<":
pkg.specs[index] = ("!=", value[1])
pkg.specs.append(("<=", upgrades[pkg.project_name]))
elif value[0] == "==":
pkg.specs = [(">=", value[1]), ("<=", upgrades[pkg.project_name])]
pkg.specifier = SpecifierSet(",".join("".join(spec) for spec in pkg.specs)) # type: ignore
return "\n".join(str(pkg) for pkg in pkgs)
def upgrade_setup_cfg(
upgraded_packages: List[Dict[str, str]], filename: str = "setup.cfg"
) -> ConfigParser:
"""Upgrade the ``setup.cfg`` file.
Parameters
----------
upgraded_packages : List[Dict[str, str]]
A list of packages upgraded in the testing procedure.
filename : str, optional (default "setup.cfg")
The name of the configuration file to read. Defaults to ``setup.cfg``.
Returns
-------
ConfigParser
The updated configuration file.
"""
parser = ConfigParser()
parser.read(filename)
if "options" in parser and parser.get("options", "install_requires"):
LOG.info(f"Updating the requirements in {filename}")
upgraded = upgrade_requirements(
fname_or_buf=parser["options"]["install_requires"].lstrip(),
upgraded_packages=upgraded_packages,
)
parser["options"]["install_requires"] = "\n" + upgraded
# Update the extras, if necessary
if "options.extras_require" in parser:
for extra, dependencies in parser.items("options.extras_require"):
upgraded = upgrade_requirements(
fname_or_buf=dependencies,
upgraded_packages=upgraded_packages,
)
parser["options.extras_require"][extra] = "\n" + upgraded
return parser
def upgrade_pyproject_toml(
upgraded_packages: List[Dict[str, str]], filename: str = "pyproject.toml"
) -> TOMLDocument:
"""Upgrade the ``pyproject.toml`` file.
Parameters
----------
upgraded_packages : List[Dict[str, str]]
A list of packages upgraded in the testing procedure.
filename : str, optional (default "pyproject.toml")
The name of the configuration file to read. Defaults to ``pyproject.toml``.
Returns
-------
TOMLDocument
The updated TOMLDocument.
"""
parser: TOMLDocument = load(open(filename))
if "project" in parser and parser.get("project").get("dependencies"): # type: ignore
LOG.info(f"Updating the requirements in {filename}")
upgraded = upgrade_requirements(
fname_or_buf="\n".join(parser["project"]["dependencies"]), # type: ignore
upgraded_packages=upgraded_packages,
)
parser["project"]["dependencies"] = upgraded.split("\n") # type: ignore
# Update the extras, if necessary
    if parser.get("project", {}).get("optional-dependencies"):  # type: ignore
for extra, dependencies in parser["project"]["optional-dependencies"].items(): # type: ignore # noqa: E501
upgraded = upgrade_requirements(
fname_or_buf="\n".join(dependencies),
upgraded_packages=upgraded_packages,
)
parser["project"]["optional-dependencies"][extra] = upgraded.split("\n") # type: ignore
return parser
def _isin_case_dashhyphen_ins(a: str, vals: List[str]) -> bool:
"""Run isin check that is case and dash/hyphen insensitive.
    Parameters
----------
a : str
String value to check for membership against ``vals``.
vals : list of str
List of strings to check ``a`` against.
Returns
-------
bool
Return ``True`` if ``a`` in vals, otherwise ``False``.
"""
for b in vals:
if a.replace("_", "-").lower() == b.replace("_", "-").lower():
return True
return False
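# Example (illustrative): how ``upgrade_requirements`` rewrites version pins
# after a successful upgrade test (specifier ordering may vary).
#
#   upgrade_requirements(
#       fname_or_buf="pandas<=1.0\nnumpy==1.16",
#       upgraded_packages=[
#           {"name": "pandas", "version": "1.5.3"},
#           {"name": "numpy", "version": "1.24.2"},
#       ],
#   )
#   # -> "pandas<=1.5.3\nnumpy>=1.16,<=1.24.2"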

===== capitalone_repos/edgetest/edgetest/__init__.py =====
"""Package initialization."""
__version__ = "2023.6.1"
__title__ = "edgetest"
__description__ = "Bleeding edge dependency testing"
__url__ = "https://github.com/capitalone/edgetest"
__uri__ = __url__
__doc__ = __description__ + " <" + __uri__ + ">"
__author__ = "Akshay Gupta"
__email__ = "akshay.gupta2@capitalone.com"

===== capitalone_repos/Particle-Cloud-Framework/STYLEGUIDE.md =====
# Particle Cloud Framework Style Guide
This is a Python Style Guide for the PCF Framework. The intent of this guide is to strive for cleaner and better code quality. Following this guide, our code will be approachable to anyone who needs to maintain it later, including ourselves!
## Table of Contents
1. [Virtual Environments](#venv)
2. [Imports](#imports)
3. [Naming](#naming)
i. [Variables](#variables)
ii. [Class names](#classes)
iii. [Functions/Methods](#funcs)
4. [Spacing](#spacing)
5. [Strings](#strings)
i. [Quotes](#quotes)
ii. [String Formatting](#strformat)
iii. [Docstrings](#docstr)
6. [Dictionary Key-Value Retrieval](#dict)
7. [Method Returns](#returns)
8. [PCF Utility Functions](#pcf_util)
9. [PCF Exceptions](#pcf_excep)
10. [Logging Standards](#logging)
## <a name="venv">Virtual Environments</a>
Conda is the preferred virtual environment setup for testing and development. However, Python 3.3+ includes the native
`venv` (virtual environment) library. The steps for setting up a virtual environment with `venv` can be found <a href="https://docs.python.org/3/library/venv.html">here</a>.
### Conda
Conda is a package management system that can help you easily keep track of the package requirements
for projects and install them automatically for you.
You can set up and share collections of packages called environments.
For the sake of clarity, all references will be for macOS users.
#### Installation
Visit this <a href="https://docs.anaconda.com/anaconda/install/">link</a> to
find installation details for your appropriate OS.
<a href="https://www.anaconda.com/download/#macos">macOS</a>
Click *version 3.7*
### Create a new virtual env
Visit this
<a href="https://conda.io/docs/user-guide/getting-started.html#starting-conda">link</a>
to find documentation on your appropriate OS.
All conda commands will be typed in the Terminal window.
```commandline
conda create --name pcf-local python==3.6
```
*PCF requires Python version 3.6+*
### Activate virtual env
```commandline
source activate pcf-local
```
### Deactivate virtual env
```commandline
source deactivate
```
### View list of existing virtual envs
```commandline
conda info --envs
```
## <a name="imports">Imports</a>
Imports are always put at the top of the file, just after any module comments and docstrings, and before module globals and constants.
Refrain from importing an entire module when only one object from it is needed. For consistency, whole-module imports (`import module`) come first,
followed by imports of specific objects from modules (`from module import function`).
**Don't:**
```python
from time import sleep
import boto3, json, logging
import pcf.core.PCF_exceptions
```
**Do:**
```python
import boto3
import json
import logging
from pcf.core.pcf_exceptions import NoResourceException
from time import sleep
```
## <a name="naming">Naming</a>
PCF typically follows the same naming conventions as <a href="https://visualgit.readthedocs.io/en/latest/pages/naming_convention.html">PEP8 standards</a>.
### <a name="variables">Variables</a>
```python
# Lowercase multi-word separated by an underscore
my_greeting = "hello"
```
```python
# Non-public instance variables should begin with a single underscore
_client = ""
```
### <a name="classes">Classes</a>
```python
# UpperCaseCamelCase convention
class LambdaFunction():
# class
class AWSResource():
# class
class EC2Instance():
# class
```
### <a name="funcs">Functions/Methods</a>
```python
# Lowercase multi-word separated by an underscore
def get_status(self):
pass
```
```python
# Non-public methods should begin with a single underscore
def _terminate(self):
pass
```
## <a name="spacing">Spacing</a>
* 4 spaces = 1 indent
* Leave a single blank line after function/method declarations to aid in visual clarity and organization
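For example:
```python
class StatusTracker:
    def get_status(self):
        """Return the current status."""
        return self._status

    def reset_status(self):
        """Clear the cached status."""
        self._status = None
```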
## <a name="strings">Strings</a>
### <a name="quotes">Quotes</a>
In Python, single quotes and double quotes can be used interchangeably. We will stick to double quotes for consistency.
**Don't:**
```python
name_of_school = 'Marshall'
name_of_office = "Clarendon"
```
**Do:**
```python
name_of_school = "Marshall"
name_of_office = "Clarendon"
```
### <a name="strformat">String Formatting</a>
As of Python 3.6, f-strings (formatted string literals) offer a more readable and efficient alternative to the older formatting methods: %-formatting and str.format().
Learn more about the f-strings <a href="https://docs.python.org/3/reference/lexical_analysis.html#f-strings">here</a>.
**Don't:**
```python
def generate_pcf_id(flavor, pcf_name):
return "{}:{}".format(flavor, pcf_name)
```
**Do:**
```python
def generate_pcf_id(flavor, pcf_name):
return f"{flavor}:{pcf_name}" # f and F are to be used interchangeably
```
### <a name="docstr">Docstrings</a>
Docstrings act as documentation for method definitions of a class within PCF. Always use """triple double quotes""" around docstrings. For multi-line docstrings, place the closing quotes on a line by itself.
The docstrings should give a brief description of what the method/class is doing, explain arguments and their type, if any, and list what the method/class returns.
> If methods and classes are not documented this way they will not be merged in.
For more info see the <a href="https://www.python.org/dev/peps/pep-0257/#what-is-a-docstring">docs</a>.
**Example:**
```python
def is_state_equivalent(self, state1, state2):
"""
Determines if states are equivalent. Uses equivalent_states defined in the Glacier class.
Args:
        state1 (State):
        state2 (State):
Returns:
bool
"""
```
## <a name="dict">Dictionary Key-Value Retrieval</a>
When retrieving a value from a dictionary key, do not use square bracket notation. This method will return an error if the
indicated key does not exist in the dictionary. Instead, use the `get` method which returns `None` by default if the
indicated key does not exist in the dictionary.
**Don't:**
```python
bucket_name = desired_state_definition["Bucket"]
```
**Do:**
```python
# dict.get(key, default) returns default (None if omitted) when the key is missing
bucket_name = desired_state_definition.get("Bucket")
```
## <a name="returns">Method Returns</a>
Methods that return dictionaries should always return an empty dictionary, `{}`,
rather than `None` when there is nothing to return. This keeps the return type
consistent and lets us utilize the built-in boolean evaluation of dictionaries.
Example:
```python
def _get_alias(self):
"""
Returns the alias record for the provided key_name in custom configuration.
Returns:
{} (dict) Containing nothing, or the keys below:
- AliasName (str) - The alias name (always starts with "alias/")
- AliasArn (str) - The ARN for the key alias
- TargetKeyId (str) - The unique identifier for the key the alias is\
associated with
"""
alias_list = self.client.list_aliases()
    for alias in alias_list.get("Aliases"):
        if alias.get("AliasName") == "alias/" + self.key_name:
return alias
return {}
```
## <a name="pcf_util">PCF Utility Functions</a>
There are several functions in <a href="https://github.com/capitalone/Particle-Cloud-Framework/blob/master/pcf/util/pcf_util.py">pcf_util.py</a>
that perform various tasks such as creating a dictionary based on a key set and finding nested values in a dictionary.
Refer to this module when performing such complex operations. If there is not an existing function that meets your needs, simply
create one. The desired functions are then imported into the particular module you are implementing.
Example:
```python
def param_filter(curr_dict, key_set, remove=False):
"""
Filters param dictionary to only have keys in the key set
Args:
curr_dict (dict): param dictionary
key_set (set): set of keys you want
remove (bool): filters by what to remove instead of what to keep
Returns:
filtered param dictionary
"""
if remove:
return {key: curr_dict[key] for key in curr_dict.keys() if key not in key_set}
else:
return {key: curr_dict[key] for key in key_set if key in curr_dict.keys()}
```
## <a name="pcf_excep">PCF Exceptions</a>
There are several exceptions in <a href="https://github.com/capitalone/Particle-Cloud-Framework/blob/master/pcf/core/pcf_exceptions.py">pcf_exceptions.py</a>
that define various exceptions. Refer to this module when considering exception handling. If there is not an existing exception that meets your needs, simply
create one. The desired exceptions are then imported into the particular module you are implementing.
Example:
```python
class NoCodeException(Exception):
def __init__(self):
Exception.__init__(self, "Did not provide local zipfile or zipfile location in S3")
```
## <a name="logging">Logging Standards</a>
Logging is a means of tracking events that happen when some software runs. More information on logging in Python can be
found <a href="https://docs.python.org/3/howto/logging.html#logging-basic-tutorial">here</a>.
To enable logging in PCF, add the following code to the top of your pcf python file.
Example:
```python
import logging
logger = logging.getLogger(__name__)
def get_state(self):
"""
Calls sync state and afterward returns the current state. Uses cached state if available.
Returns:
state
"""
if not self.use_cached_state():
self.sync_state()
self.state_last_refresh_time = time.time()
logger.info(f"Refreshed state for {self.pcf_id}: {self.state}")
else:
logger.debug(f"Using cached state for {self.pcf_id}: {self.state}")
return self.state
```

===== capitalone_repos/Particle-Cloud-Framework/requirements.txt =====
boto==2.48.0
boto3==1.9.143
botocore==1.12.143
Jinja2==2.11.3
google-cloud-storage==1.15.0
google-compute-engine==2.8.13
google-api-python-client==1.7.4
commentjson==0.7.1
deepdiff==4.0.6
azure-common==1.1.20
azure-mgmt-compute==4.6.2
azure-mgmt-network==2.7.0
azure-mgmt-resource==4.0.0
azure-mgmt-storage==3.3.0
azure-storage-blob==1.5.0
azure-storage-common==1.4.0
azure-cli-core==2.0.57
click==7.0
python-Levenshtein==0.12.0
pyyaml==5.4

===== capitalone_repos/Particle-Cloud-Framework/tasks.py =====
""" Project Management Tasks """
import os
from invoke import task, Collection
namespace = Collection()
@task
def setup(ctx):
    """ Set up a virtualenv and install requirements into it """
    # Sourcing an activate script in ctx.run does not persist to later calls,
    # so install with the virtualenv's own pip instead.
    ctx.run("virtualenv venv")
    ctx.run("venv/bin/pip install -r requirements.txt -r requirements-dev.txt")
@task
def docs_add(ctx):
""" Run sphinx-apidoc on pcf and pcf/test """
ctx.run("cd docs; sphinx-apidoc -o source ../pcf ../pcf/test/*")
@task
def lint(ctx):
""" Run pylint on pcf directory """
ctx.run("pylint pcf")
@task
def test(ctx):
""" Run pytest on pcf directory, generating a coverage report """
ctx.run("pytest --cov-config .coveragerc --cov=pcf --cov-report term-missing")
@task
def build(ctx, pcf_tag=None):
""" Build PCF with the PCF_TAG value given or the VERSION in pcf/__init__.py """
if pcf_tag:
os.environ['PCF_TAG'] = pcf_tag
ctx.run("python setup.py bdist_wheel")
@task(build)
def publish(ctx):
""" Publish package to Pypi """
ctx.run("python -m twine upload dist/*")
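# Example (illustrative): these tasks are run with the ``invoke`` CLI, which
# turns the ``pcf_tag`` parameter into a ``--pcf-tag`` flag.
#
#   invoke lint
#   invoke test
#   invoke build --pcf-tag 1.2.3
#   invoke publish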

===== capitalone_repos/Particle-Cloud-Framework/.coveragerc =====
[run]
omit = pcf/test/*

===== capitalone_repos/Particle-Cloud-Framework/LICENSE =====
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

===== capitalone_repos/Particle-Cloud-Framework/.pylintrc =====
[MASTER]
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=test
# Pickle collected data for later comparisons.
persistent=yes
[MESSAGES CONTROL]
#enable=
disable=C0301,F0401,W0141,W0611,W0110,W0703,W0142,C0103,C0111,C0302,I0010,I0011,R0801,R0901,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R0921,R0922,W0122,W0141,W0142,W0402,W0404,W0511,W0603,W0703,W1201,bad-continuation,anomalous-backslash-in-string,bad-context-manager,bad-indentation,bad-str-strip-call,bad-whitespace,cell-var-from-loop,deprecated-lambda,eval-used,function-redefined,import-error,locally-enabled,missing-final-newline,no-init,no-name-in-module,no-self-use,not-callable,old-style-class,protected-access,superfluous-parens,super-on-old-class,too-many-function-args,trailing-whitespace,unnecessary-semicolon,unpacking-non-sequence,unused-import,useless-else-on-loop,C0103,C0111,C0302,I0010,I0011,R0801,R0901,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R0921,R0922,W0122,W0141,W0142,W0402,W0404,W0511,W0603,W0703,W1201
[REPORTS]
output-format=text
files-output=no
reports=no
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
comment=no
[TYPECHECK]
ignore-mixin-members=yes
ignored-classes=SQLObject,twisted.internet.reactor,hashlib,google.appengine.api.memcache
generated-members=REQUEST,acl_users,aq_parent,multiprocessing.managers.SyncManager
[MISCELLANEOUS]
notes=FIXME,XXX,TODO
[SIMILARITIES]
min-similarity-lines=4
ignore-comments=yes
ignore-docstrings=yes
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the beginning of the name of dummy variables
# (i.e. not used).
dummy-variables-rgx=_|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
[BASIC]
# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,apply,input
# Regular expression which should only match correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression which should only match correct module level names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression which should only match correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Regular expression which should only match correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct instance attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct list comprehension /
# generator expression variable names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Good variable names which should always be accepted, separated by a comma
# ax is used for matplotlib axes
# sc is used for Spark context
good-names=i,j,k,ex,Run,_,ax,sc
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Regular expression which should only match functions or classes name which do
# not require a docstring
no-docstring-rgx=__.*__
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=100
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Maximum number of lines in a module
max-module-lines=1000
indent-string=' '
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec,string
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
|
0 | capitalone_repos | capitalone_repos/Particle-Cloud-Framework/CODEOWNERS | * @anovis @ethanwlo @davidyum
|
0 | capitalone_repos | capitalone_repos/Particle-Cloud-Framework/.editorconfig | # EditorConfig is awesome: http://EditorConfig.org
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
# Matches all YAML files
[*.yml]
indent_style = space
indent_size = 2
[*.py]
indent_style = space
indent_size = 4
|
0 | capitalone_repos | capitalone_repos/Particle-Cloud-Framework/README.md | # Due to changes in priorities, this project is currently not being supported. The project was archived as of 9/17/21 and will be available in a read-only state. Please note that, since archival, the project is not maintained or reviewed. #
[![Build Status](https://img.shields.io/travis/capitalone/Particle-Cloud-Framework/master.svg?label=master)](https://travis-ci.org/capitalone/Particle-Cloud-Framework)
[![Build Status](https://img.shields.io/travis/capitalone/Particle-Cloud-Framework/develop.svg?label=develop)](https://travis-ci.org/capitalone/Particle-Cloud-Framework)
[![Licence](https://img.shields.io/badge/license-Apache%202-blue.svg)](https://www.apache.org/licenses/LICENSE-2.0)
[![PyPi Version](https://img.shields.io/pypi/v/pcf.svg?maxAge=2592000)](https://pypi.org/project/pcf/)
[![Supported Python Versions](https://img.shields.io/pypi/pyversions/pcf.svg?label=Python)](https://pypi.org/project/pcf/)
# Particle Cloud Framework
Particle Cloud Framework is a cloud resource provisioning framework that is fully customizable and extensible, callable by code, and does not require manually maintaining states of resources. Particle Cloud Framework enables the standardization of modeling hierarchical cloud infrastructure, automating deployments, and managing lifecycles of cloud resources.
## Docs
[Docs](https://capitalone.github.io/Particle-Cloud-Framework/docs/build/html/index.html) including quickstart and developer guide
Installation
------------
To install particle cloud framework, open an interactive shell and run:
`pip install pcf`
Import and use a PCF Particle
-------------------------------
First import the particles you will use. These can be core particles or custom particles that you created.
See examples if you need help creating your config.
```
from pcf.core.ec2.ec2_instance import EC2Instance
```
Next we need to pass the desired state definition to the particle.
```
ec2_example_definition = {
"pcf_name": "ec2_example",
"flavor":"ec2",
"aws_resource": {
"ImageId": "ami-xxxxx",
"InstanceType": "t2.micro",
"KeyName": "secret-key-xxx",
"SecurityGroupIds": [
"sg-xxxxxx",
],
"SubnetId": "subnet-xxx",
"userdata_template_file": "userdata-script-xxxxx.sh",
"userdata_params": {},
"IamInstanceProfile": {
"Arn": "arn:aws:iam::xxxxxxxxx"
},
"InstanceInitiatedShutdownBehavior": "stop",
"tags": {
"NAME":"Value"
},
"BlockDeviceMappings": [
{
"DeviceName": "/dev/sda1",
"Ebs": {
"DeleteOnTermination": true,
"VolumeSize": 20,
"VolumeType": "gp2"
}
}
]
}
}
```
Now, to start the EC2 instance using PCF, simply initialize the particle, set the desired state to running, and apply.
```
particle = EC2Instance(ec2_example_definition)
particle.set_desired_state('running')
particle.apply()
```
To terminate, simply change the desired state to terminated and apply.
```
particle.set_desired_state('terminated')
particle.apply()
```
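At any point you can inspect the particle's current state with `get_state()`, the same call used throughout the examples in this repository:
```
print(particle.get_state())
```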
## Published Content
[*Just in Time Cloud Infrastructure:
Redefining the Relationship Between Applications and Cloud Infrastructure*](https://www.capitalone.com/tech/cloud/just-in-time-cloud-infrastructure)
## Supported Cloud Services
[Particles](https://capitalone.github.io/Particle-Cloud-Framework/docs/build/html/particlelist.html)
[Quasiparticles](https://capitalone.github.io/Particle-Cloud-Framework/docs/build/html/quasiparticlelist.html)
## Development Setup
To develop locally, clone this project and ensure you have the Invoke package installed globally via `pip` or `conda`:
```
$ pip install invoke
```
or
```
$ conda install invoke
```
Then you can use the project management tasks defined in `tasks.py` via the `invoke` CLI:
```
$ invoke --list
Available tasks:
build Build PCF with the PCF_TAG value given or the VERSION in pcf/__init__.py
docs-add Run sphinx-apidoc on pcf and pcf/test
lint Run pylint on pcf directory
publish Publish package to Pypi
setup Setup a virtualenv, activate it, and install requirements
test Run pytest on pcf directory, generating a coverage report
$ invoke setup && source venv/bin/activate
$ invoke test
```
## RoadMap
[Roadmap](https://capitalone.github.io/Particle-Cloud-Framework/docs/build/html/sections/roadmap.html)
## Contributors
We welcome Your interest in Capital One’s Open Source Projects (the
“Project”). Any Contributor to the Project must accept and sign an
Agreement indicating agreement to the license terms below. Except for
the license granted in this Agreement to Capital One and to recipients
of software distributed by Capital One, You reserve all right, title,
and interest in and to Your Contributions; this Agreement does not
impact Your rights to use Your own Contributions for any other purpose.
[Sign the Individual Agreement](https://docs.google.com/forms/d/19LpBBjykHPox18vrZvBbZUcK6gQTj7qv1O5hCduAZFU/viewform)
[Sign the Corporate Agreement](https://docs.google.com/forms/d/e/1FAIpQLSeAbobIPLCVZD_ccgtMWBDAcN68oqbAJBQyDTSAQ1AkYuCp_g/viewform?usp=send_form)
## Code of Conduct
This project adheres to the [Open Code of Conduct](https://developer.capitalone.com/resources/code-of-conduct)
By participating, you are
expected to honor this code.
|
0 | capitalone_repos | capitalone_repos/Particle-Cloud-Framework/Makefile | .PHONY: docs
# Makefile is not used in CICD. Kept around for legacy compatibility
# See tasks.py to edit the CICD pipeline
clean:
rm -rf bin/ lib/
install: clean
python3 -m venv .
bin/pip install -r requirements.txt
bin/pip install -e .
docs:
cd docs; make html
docs-add:
cd docs; sphinx-apidoc -o source ../pcf ../pcf/test/*
pypi-build:
export PCF_TAG=$(PCF_TAG)
python setup.py bdist_wheel
python -m twine upload dist/*
test:
pytest --cov-config .coveragerc --cov=pcf --cov-report term-missing
|
0 | capitalone_repos | capitalone_repos/Particle-Cloud-Framework/pytest.ini | [pytest]
addopts = --ignore examples -v
testpaths = pcf/test
env =
AWS_DEFAULT_REGION=us-east-1
|
0 | capitalone_repos | capitalone_repos/Particle-Cloud-Framework/requirements-dev.txt | pytest==4.4.1
pytest-bdd==3.1.0
pytest-cov==2.7.1
pytest-json==0.4.0
pytest-mock==1.10.4
pytest-env==0.6.2
coverage==4.5.3
moto==1.3.8
mock==3.0.5
pylint==2.3.1
twine==1.12.1
placebo==0.9.0
sphinx==1.8.3
sphinx_rtd_theme==0.4.2
invoke==1.1.1
recommonmark==0.5.0
|
0 | capitalone_repos | capitalone_repos/Particle-Cloud-Framework/setup.py | # Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from glob import glob
from pathlib import Path
from setuptools import setup, find_packages
from pcf import VERSION
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pcf',
version=os.environ.get('PCF_TAG', VERSION),
description='pcf',
long_description=read('README.md'),
long_description_content_type='text/markdown',
author='anovis,bb1314,davidyum',
packages=find_packages(),
url='https://github.com/capitalone/Particle-Cloud-Framework',
entry_points='''
[console_scripts]
pcf=pcf.cli.cli:cli
''',
install_requires=[
"azure-storage-common==1.4.0",
"azure-storage-blob==1.5.0",
"azure-common==1.1.20",
"azure-mgmt-compute==4.6.2",
"azure-mgmt-resource==2.1.0",
"azure-mgmt-network==2.7.0",
"azure-mgmt-storage==3.3.0",
"azure-cli-core==2.0.57",
"boto==2.48.0",
"boto3==1.9.143",
"Jinja2==2.11.3",
"google-compute-engine==2.8.13",
"google-cloud-storage==1.15.0",
"google-api-python-client==1.7.4",
"commentjson==0.7.1",
"botocore==1.12.143",
"deepdiff==4.0.6",
"click==7.0",
"python-Levenshtein==0.12.0",
"pyyaml==5.4"
],
package_data={'pcf': glob('**/*.j2', recursive=True)},
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
],
)
|
0 | capitalone_repos | capitalone_repos/Particle-Cloud-Framework/.travis.yml | language: python
sudo: false
matrix:
fast_finish: true
include:
- python: 3.6
- python: 3.7
dist: xenial
sudo: true
allow_failures:
- python: 3.7
before_install:
- export AWS_SECRET_ACCESS_KEY=foobar_secret
- export AWS_ACCESS_KEY_ID=foobar_key
install:
- pip install invoke
- pip install -r requirements.txt -r requirements-dev.txt
script:
- invoke test
- invoke build
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/tools | capitalone_repos/Particle-Cloud-Framework/examples/tools/pcf_generator/example_pcf_generator.py | # Example on how to use pcf generator tool
# used to create running resources
vpc_definition = {
"pcf_name": "pcf-example",
"flavor": "vpc_instance",
"aws_resource": {
"custom_config": {
"vpc_name": "example-vpc",
},
"CidrBlock":"10.0.0.0/16"
}
}
subnet_definition = {
"pcf_name": "pcf-example",
"flavor": "subnet",
"parents":["vpc_instance:pcf-example"],
"aws_resource": {
"custom_config": {
"subnet_name": "example-subnet",
},
"CidrBlock":"10.0.0.0/24",
"AvailabilityZone": "us-east-1d",
"AvailabilityZoneId": "use1-az6",
}
}
quasiparticle_definition = {
"pcf_name": "pcf-example",
"flavor": "quasiparticle",
"particles": [
vpc_definition,
subnet_definition,
]
}
# used to create subnet definition from running subnet
base_subnet_definition = {
"pcf_name":"example",
"flavor": "subnet",
"aws_resource": {
"custom_config": {
"subnet_name": "example-subnet",
},
}
}
base_quasiparticle_definition = {
"pcf_name":"example_quasiparticle",
"flavor":"quasiparticle",
"particles":[
{
"flavor": "vpc_instance",
"aws_resource": {
"custom_config": {
"vpc_name": "example-vpc",
}
}
},
{
"pcf_name":"example",
"flavor":"subnet",
"parents":["vpc_instance:example"],
"aws_resource":
{
"custom_config":{
"subnet_name":"example_subnet"
}
}
}
]
}
# create a vpc and subnet to be used for the example
from pcf.core.quasiparticle import Quasiparticle
from pcf.core import State
# from pcf.particle.aws.vpc.vpc_instance import VPC
# from pcf.particle.aws.vpc.subnet import Subnet
subnet_vpc_quasiparticle = Quasiparticle(quasiparticle_definition)
subnet_vpc_quasiparticle.set_desired_state(State.running)
subnet_vpc_quasiparticle.apply()
# get the full subnet definition and both print it and create pcf.json file with the definition
from pcf.tools.pcf_generator.pcf_generator import GenerateParticle
generated_subnet_particle = GenerateParticle(base_subnet_definition)
print(generated_subnet_particle.generate_definition())
generated_subnet_particle.generate_json_file()
# example of a quasiparticle using the generator
from pcf.tools.pcf_generator.pcf_generator import GenerateQuasiparticle
quasiparticle = GenerateQuasiparticle(base_quasiparticle_definition)
print(quasiparticle.generate_definition())
# terminate the subnet created in this example
subnet_vpc_quasiparticle.set_desired_state(State.terminated)
subnet_vpc_quasiparticle.apply()
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/cross_cloud/cross_cloud_storage.py | from pcf.particle.aws.s3.s3_bucket import S3Bucket
from pcf.core import State
from pcf.quasiparticle.cross_cloud.cross_cloud_storage.cross_cloud_storage import CrossCloudStorage
import os
import sys
# Edit example json to work in your account
cross_cloud_storage_example = {
"pcf_name": "cross_cloud_storage", # Required
"flavor": "cross_cloud_storage",
"storage_name": 'pcf-testing'
}
# create S3 Bucket particle
cross_cloud_storage = CrossCloudStorage(cross_cloud_storage_example)
# example start
cross_cloud_storage.set_desired_state(State.running)
cross_cloud_storage.apply()
print(cross_cloud_storage.get_state())
# example put object
some_binary_data = b'Here we have some data'
cross_cloud_storage.put_object(Bucket="pcf-testing", Key="test-object", Body=some_binary_data)
cross_cloud_storage.put_object(Bucket="pcf-testing", Key="test-file", Filename=os.path.join(sys.path[0],"test.txt"))
# example put terminate
cross_cloud_storage.delete_object(Bucket="pcf-testing", Key="test-object")
cross_cloud_storage.delete_object(Bucket="pcf-testing", Key="test-file")
cross_cloud_storage.set_desired_state(State.terminated)
cross_cloud_storage.apply()
print(cross_cloud_storage.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/cross_cloud/test.txt | test text file
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/cross_cloud/README.MD | # What does this example do?
- This example uses a quasiparticle (AWS S3 + GCP Storage) to show how to replicate and manage data across multiple cloud providers. It creates S3 and GCP storage buckets, uploads an object to each bucket, deletes both objects, then terminates the buckets |
0 | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws/simple_webapp/ec2-route53.py | import logging
from pcf.core import State
from pcf.quasiparticle.aws.ec2_route53.ec2_route53 import EC2Route53
logging.basicConfig(level=logging.DEBUG)
for handler in logging.root.handlers:
handler.addFilter(logging.Filter('pcf'))
# Edit example json to work in your account
# example quasiparticle that contains ec2 and route53
ec2_route53_example_definition = {
"pcf_name": "pcf-example", # Required
"flavor": "ec2_route53", # Required
"particles": [{
"flavor": "ec2_instance", # Required
"multiplier": 2,
"aws_resource": {
"custom_config": {
"instance_name": "pcf-ec2-test", # Required
"tags": {
"OwnerContact": "you@yourcompany.com"
},
},
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#EC2.ServiceResource.create_instances for a full list of parameters
"ImageId": "ami-11111111", # Required
"InstanceType": "t2.nano", # Required
"KeyName": "my-key",
"MaxCount": 1,
"MinCount": 1,
"SecurityGroupIds": [
"sg-11111111",
"sg-22222222"
],
"SubnetId": "subnet-11111111", # Required
"UserData": "echo abc123",
"IamInstanceProfile": {
"Arn": "arn:aws:iam::111111111111:instance-profile/someRole"
},
"BlockDeviceMappings": [ # Required
{
"DeviceName": "/dev/sda1", # DeviceName changes for different Linux distro
"Ebs": {
"DeleteOnTermination": True,
"VolumeSize": 100,
"VolumeType": "gp2"
}
}
]
}
},
{
"flavor": "route53_record", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/route53.html#Route53.Client.change_resource_record_sets for full list of parameters
"Name": "testingtesting.aws-inno-dqa.cb4good.com.", # Required
"HostedZoneId": "1A1A1A1A1A1A1A", # Required
"TTL": 300,
"ResourceRecords": [], # Required
"Type": "A" # Required
}
}
]
}
# create ec2_route53 quasiparticle
ec2_route53_quasiparticle = EC2Route53(ec2_route53_example_definition)
# example start
ec2_route53_quasiparticle.set_desired_state(State.running)
ec2_route53_quasiparticle.apply(sync=True)
route53 = ec2_route53_quasiparticle.get_particle("route53_record", "pcf-example")
ec2 = ec2_route53_quasiparticle.get_particle("ec2_instance", "pcf-example-1")
print(ec2.get_state())
print(route53.get_state())
print(ec2_route53_quasiparticle.get_state())
# example terminate
ec2_route53_quasiparticle.set_desired_state(State.terminated)
ec2_route53_quasiparticle.apply(sync=True)
route53 = ec2_route53_quasiparticle.get_particle("route53_record", "pcf-example")
ec2 = ec2_route53_quasiparticle.get_particle("ec2_instance", "pcf-example")
print(ec2.get_state())
print(route53.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws/simple_webapp/README.md | # What does this example do?
- The purpose of this example is to show one of multiple ways to use Particle Cloud Framework (PCF) to deploy cloud infrastructure, in this case for a simple Flask web app. In this particular example we deploy the ec2-route53 quasiparticle, which creates a Route53 record and adds the desired number of EC2 instances to the record set. To use it in your environment, simply change some of the configuration parameters in the quasiparticle definition, as sketched below.
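As a minimal sketch, these are the account-specific fields in `ec2-route53.py` to swap out before running (all values below are placeholders, not working IDs):

```
ec2 = ec2_route53_example_definition["particles"][0]["aws_resource"]
ec2["ImageId"] = "ami-<your-ami>"
ec2["SubnetId"] = "subnet-<your-subnet>"
ec2["SecurityGroupIds"] = ["sg-<your-sg>"]
route53 = ec2_route53_example_definition["particles"][1]["aws_resource"]
route53["HostedZoneId"] = "<your-hosted-zone-id>"
route53["Name"] = "your-record.your-domain.com."
```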
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws/ecs_cluster/ecs_instance_userdata.sh.j2 | # Export environment variables
{% for ENVIRONMENT_VARIABLE in ENVIRONMENT_VARIABLES|sort %}
export {{ENVIRONMENT_VARIABLE}}
{% endfor %}
# Run your code here
sudo apt install docker.io -y
sysctl -w net.ipv4.conf.all.route_localnet=1
iptables -t nat -A PREROUTING -p tcp -d 169.254.170.2 --dport 80 -j DNAT --to-destination 127.0.0.1:51679
iptables -t nat -A OUTPUT -d 169.254.170.2 -p tcp -m tcp --dport 80 -j REDIRECT --to-ports 51679
# More of the env setup from the docs:
mkdir -p /var/log/ecs
mkdir -p /var/lib/ecs/data
docker \
run \
--name ecs-agent \
--detach=true \
--volume=/var/run/docker.sock:/var/run/docker.sock \
--volume=/var/log/ecs/:/log:Z \
--volume=/var/lib/ecs/data:/data:Z \
--volume=/sys/fs/cgroup:/sys/fs/cgroup:ro \
--volume=/var/run/docker/execdriver/native:/var/lib/docker/execdriver/native:ro \
--net=host \
--env=ECS_LOGFILE=/log/ecs-agent.log \
--env=ECS_LOGLEVEL=info \
--env=ECS_DATADIR=/data \
--env=ECS_CLUSTER={{ecs_cluster_name}} \
--env=ECS_ENABLE_TASK_IAM_ROLE=true \
--env=ECS_ENABLE_TASK_IAM_ROLE_NETWORK_HOST=true \
--env=ECS_AVAILABLE_LOGGING_DRIVERS='["json-file","awslogs"]' \
--env=ECS_ENABLE_TASK_IAM_ROLE=true \
--env=http_proxy=$http_proxy \
--env=HTTP_PROXY=$http_proxy \
--env=HTTPS_PROXY=$https_proxy \
--env=https_proxy=$https_proxy \
--env=no_proxy=$no_proxy \
--env=NO_PROXY=$no_proxy \
--env=ECS_INSTANCE_ATTRIBUTES='{"instance-id":"'$(curl -s http://169.254.169.254/latest/meta-data/instance-id)'"}' \
--privileged \
amazon/amazon-ecs-agent:latest
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws/ecs_cluster/ecs_instance.py | import logging
from pcf.core import State
from pcf.quasiparticle.aws.ecs_instance_quasi.ecs_instance_quasi import ECSInstanceQuasi
logging.basicConfig(level=logging.DEBUG)
for handler in logging.root.handlers:
handler.addFilter(logging.Filter('pcf'))
# Edit example json to work in your account
# example ec2 instance quasiparticle
ecs_instance_quasi_example_definition = {
"pcf_name": "pcf-example", # Required
"flavor": "ecs_instance_quasi", # Required
"particles": [{
"flavor": "ec2_instance", # Required
"aws_resource": {
"custom_config": {
"instance_name": "pcf-ec2-test", # Required
"userdata_template_file": "ecs_instance_userdata.sh.j2",
# Required (AWS ECS Agent is run via userdata script to add EC2 to the ECS cluster)
"userdata_params": {
"ENVIRONMENT_VARIABLES": [
"HTTP_PROXY=http://proxy.mycompany.com:8080",
"HTTPS_PROXY=http://proxy.mycompany.com:8080",
"http_proxy=http://proxy.mycompany.com:8080",
"https_proxy=http://proxy.mycompany.com:8080",
"NO_PROXY=169.254.169.254,.mycompany.com,127.0.0.1,localhost,/var/run/docker.sock,$(curl -s http://169.254.169.254/latest/meta-data/local-ipv4/)",
"no_proxy=169.254.169.254,.mycompany.com,127.0.0.1,localhost,/var/run/docker.sock,$(curl -s http://169.254.169.254/latest/meta-data/local-ipv4/)",
"AWS_DEFAULT_REGION=us-east-1"
],
"ecs_cluster_name": "testing-quasi", # Required
}
},
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#EC2.ServiceResource.create_instances for a full list of parameters
"ImageId": "ami-11111111", # Required
"InstanceType": "t2.nano", # Required
"KeyName": "my-key",
"MaxCount": 1,
"MinCount": 1,
"SecurityGroupIds": [
"sg-11111111",
"sg-22222222"
],
"SubnetId": "subnet-11111111", # Required
"IamInstanceProfile": {
"Arn": "arn:aws:iam::111111111111:instance-profile/someRole"
},
"BlockDeviceMappings": [ # Required
{
"DeviceName": "/dev/sda1", # DeviceName changes for different Linux distro
"Ebs": {
"DeleteOnTermination": True,
"VolumeSize": 100,
"VolumeType": "gp2"
}
}
]
}
},
{
"flavor": "ecs_cluster", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ecs.html#ECS.Client.create_cluster for full list of parameters
"clusterName": "testing-quasi" # Required
}
},
{
"flavor": "ecs_instance", # Required
"aws_resource": {
"attributes": [{"name": "test", "value": "attribute"}]
}
}
]
}
# create ecs_instance quasiparticle
ec2_instance_quasiparticle = ECSInstanceQuasi(ecs_instance_quasi_example_definition)
# example start
ec2_instance_quasiparticle.set_desired_state(State.running)
ec2_instance_quasiparticle.apply()
ecs_cluster = ec2_instance_quasiparticle.get_particle("ecs_cluster", "pcf-example")
ec2 = ec2_instance_quasiparticle.get_particle("ec2_instance", "pcf-example")
print(ec2.get_state())
print(ecs_cluster.get_state())
print(ec2_instance_quasiparticle.get_state())
# example terminate
ec2_instance_quasiparticle.set_desired_state(State.terminated)
ec2_instance_quasiparticle.apply()
ecs_cluster = ec2_instance_quasiparticle.get_particle("ecs_cluster", "pcf-example")
ec2 = ec2_instance_quasiparticle.get_particle("ec2_instance", "pcf-example")
print(ec2.get_state())
print(ecs_cluster.get_state())
print(ec2_instance_quasiparticle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws/ecs_cluster/README.md | # What does this example do?
- This example uses a quasiparticle (EC2 Instance + ECS Cluster + ECS Instance) to show how to create an ECS Cluster, create an EC2 instance, and add that EC2 instance to the ECS Cluster as a container host. It then terminates all the infrastructure resources
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws/jit/README.md | # What is Just in Time Infrastructure
Just in Time Infrastructure (JiTI) is a deployment model in which the entire infrastructure required for an application is written in code and deployed alongside the application with the same lifecycle. This ensures that your infrastructure code is always the most recent version, providing security and resiliency. JiTI also makes it easier to share and deploy your application into different accounts since there are no account-specific configurations or dependencies. Check our docs for more information on JiTI and the benefits it provides.
# What does this example do?
- This example deploys an EC2 instance (the application layer) alongside all required infrastructure pieces and then proceeds to terminate them all. It requires no configuration to start (only an AWS account with access to provision resources) and deploys a VPC, subnet, security group, instance profile, and EC2 instance. The example can be expanded to any application, which could then be deployed and shared easily without configuration changes.
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws | capitalone_repos/Particle-Cloud-Framework/examples/quasiparticle/aws/jit/jit.py | import logging
import json
from pcf.core import State
from pcf.core.quasiparticle import Quasiparticle
from pcf.particle.aws.vpc.vpc_instance import VPCInstance
from pcf.particle.aws.vpc.subnet import Subnet
from pcf.particle.aws.vpc.security_group import SecurityGroup
from pcf.particle.aws.ec2.ec2_instance import EC2Instance
from pcf.particle.aws.iam.iam_role import IAMRole
logging.basicConfig(level=logging.DEBUG)
for handler in logging.root.handlers:
handler.addFilter(logging.Filter('pcf'))
vpc_definition = {
"flavor": "vpc_instance",
"aws_resource": {
"custom_config": {
"vpc_name": "jit-vpc",
},
"CidrBlock":"10.0.0.0/16"
}
}
subnet_definition = {
"flavor": "subnet",
"parents":["vpc_instance:pcf-jit-example"],
"aws_resource": {
"custom_config": {
"subnet_name": "jit-subnet",
},
"CidrBlock":"10.0.0.0/24"
}
}
security_group_definition = {
"flavor": "security_group",
"parents":["vpc_instance:pcf-jit-example"],
"aws_resource": {
"custom_config":{
"IpPermissions":[
{
"FromPort": 80,
"IpProtocol": "tcp",
"IpRanges": [{"CidrIp": "0.0.0.0/0"}],
"ToPort": 80,
"Ipv6Ranges": [],
"PrefixListIds": [],
"UserIdGroupPairs": []
}
]
},
"GroupName":"jit-sg",
"Description":"jit-sg"
}
}
assume_role_policy_document = json.dumps({
"Version": "2012-10-17",
"Statement": [
{
"Action": "sts:AssumeRole",
"Principal": {
"Service": "ec2.amazonaws.com"
},
"Effect": "Allow",
}
]
})
iam_role_definition = {
"flavor":"iam_role", # Required
"aws_resource":{
"custom_config": {
"policy_arns": [],
"IsInstanceProfile": True
},
"RoleName":"jit-iam", # Required
"AssumeRolePolicyDocument": assume_role_policy_document
},
}
ec2_definition = {
"flavor": "ec2_instance", # Required
"parents":["security_group:pcf-jit-example","subnet:pcf-jit-example","vpc_instance:pcf-jit-example", "iam_role:pcf-jit-example"],
"aws_resource": {
"custom_config": {
"instance_name": "jit-ec2", # Required
},
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#EC2.ServiceResource.create_instances for a full list of parameters
"ImageId": "$lookup$ami$ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20180912", # Required <------
"InstanceType": "t2.nano", # Required
"MaxCount": 1,
"MinCount": 1,
"SecurityGroupIds": ["$inherit$security_group:pcf-jit-example$GroupId"],
"SubnetId":"$inherit$subnet:pcf-jit-example$SubnetId", # Required
"IamInstanceProfile": {
"Arn": "$lookup$iam$instance-profile:jit-iam"
},
"UserData": "echo abc123",
}
}
# example quasiparticle that contains all required infrastructure.
jit_example_definition = {
"pcf_name": "pcf-jit-example", # Required
"flavor": "quasiparticle", # Required
"particles": [
vpc_definition,
security_group_definition,
subnet_definition,
iam_role_definition,
ec2_definition
]
}
# create quasiparticle
jit_quasiparticle = Quasiparticle(jit_example_definition)
# start example
jit_quasiparticle.set_desired_state(State.running)
jit_quasiparticle.apply(sync=True)
print(jit_quasiparticle.get_state())
# terminate example
jit_quasiparticle.set_desired_state(State.terminated)
jit_quasiparticle.apply(sync=True)
print(jit_quasiparticle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples | capitalone_repos/Particle-Cloud-Framework/examples/flask_app/pcf_example_config.json | {
"pcf_name": "pcf-example",
"flavor": "ec2_route53",
"particles": [{
"flavor": "ec2_instance",
"multiplier": 1,
"aws_resource": {
"custom_config": {
"instance_name": "pcf-ec2-test",
"tags": {
"OwnerContact": "you@yourcompany.com"
}
},
"ImageId": "ami-11111111",
"InstanceType": "t2.nano",
"KeyName": "my-key",
"MaxCount": 1,
"MinCount": 1,
"SecurityGroupIds": [
"sg-11111111",
"sg-22222222"
],
"SubnetId": "subnet-11111111",
"UserData": "echo abc123",
"IamInstanceProfile": {
"Arn": "arn:aws:iam::111111111111:instance-profile/someRole"
}
}
},
{
"flavor": "route53_record",
"aws_resource": {
"Name": "testingtesting.aws-inno-dqa.cb4good.com.",
"HostedZoneId": "1A1A1A1A1A1A1A",
"TTL": 300,
"ResourceRecords": [],
"Type": "A"
}
}
]
} |
0 | capitalone_repos/Particle-Cloud-Framework/examples | capitalone_repos/Particle-Cloud-Framework/examples/flask_app/app.py | import json
import pcf
from flask import Flask, request
from pcf.core import State
from pcf.quasiparticle.aws.ec2_route53.ec2_route53 import EC2Route53
app = Flask(__name__)
#We read the json configuration of our desired quasiparticle
with open('pcf_example_config.json', 'r') as f:
pcf_config = json.load(f)
#Here we initialize the desired state definitions
ec2_route53_quasiparticle = EC2Route53(pcf_config)
#GET /pcf endpoint returns the current state of our EC2 Route53 Quasiparticle
@app.route('/pcf', methods=['GET'])
def get_pcf_status():
return str(ec2_route53_quasiparticle.get_state())
#POST /pcf endpoint creates the EC2 Route53 Quasiparticle with the desired configuration
@app.route('/pcf', methods=['POST'])
def create():
    global ec2_route53_quasiparticle
    if request.data:
payload = json.loads(request.data)
multiplier = payload.get('ec2_multiplier')
pcf_config['particles'][0]['multiplier'] = multiplier
ec2_route53_quasiparticle = EC2Route53(pcf_config)
ec2_route53_quasiparticle.set_desired_state(State.running)
try:
ec2_route53_quasiparticle.apply(sync=True)
except Exception as e:
raise e
return str(ec2_route53_quasiparticle.get_state())
#DELETE /pcf endpoint deletes the EC2 Route53 Quasiparticle
@app.route('/pcf', methods=['DELETE'])
def delete():
ec2_route53_quasiparticle.set_desired_state(State.terminated)
try:
ec2_route53_quasiparticle.apply(sync=True)
except Exception as e:
raise e
    return str(ec2_route53_quasiparticle.get_state())
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0') |
0 | capitalone_repos/Particle-Cloud-Framework/examples | capitalone_repos/Particle-Cloud-Framework/examples/flask_app/requirements.txt | flask==1.0.2
pcf==0.3.1 |
0 | capitalone_repos/Particle-Cloud-Framework/examples | capitalone_repos/Particle-Cloud-Framework/examples/flask_app/README.md | # PCF-Flask-API
- The purpose of this example is to show one of multiple ways to use Particle Cloud Framework (PCF) to deploy cloud infrastructure, in this case through a Flask API. In this particular example we deploy the ec2-route53 quasiparticle, which creates a Route53 record and adds the desired number of EC2 instances to the record set. To use it in your environment, simply change some of the configuration parameters in the quasiparticle definition.
Run Flask App Locally with Docker
------------
`docker build -t pcf_flask_api .`
`docker run -it -p 5000:5000 pcf_flask_api`
Interact with PCF Flask API
------------
Once we have the flask app running locally, we can hit the endpoint to spin up the quasiparticle.
First, set the desired configuration in pcf_example_config.json file.
To spin up the ec2-route53 quasiparticle with the desired configuration:
`curl -X POST http://localhost:5000/pcf`
To spin up multiple ec2 instances, specify the desired count in the request payload:
`curl -X POST --header "Content-Type: application/json" -d "{ \"ec2_multiplier\": 2 }" http://localhost:5000/pcf`
To get the state of the ec2-route53 quasiparticle:
`curl -X GET http://localhost:5000/pcf`
To spin down the ec2-route53 quasiparticle:
`curl -X DELETE http://localhost:5000/pcf`
To do an update, make a POST request after setting the new desired configuration:
`curl -X POST http://localhost:5000/pcf`
|
0 | capitalone_repos/Particle-Cloud-Framework/examples | capitalone_repos/Particle-Cloud-Framework/examples/flask_app/dockerfile | FROM python:alpine3.7
COPY . /app
WORKDIR /app
RUN pip install -r requirements.txt
CMD python ./app.py |
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/azure/blob | capitalone_repos/Particle-Cloud-Framework/examples/particle/azure/blob/use_blob_container/README.md | # What does this example do?
- This example creates an Azure blob container and deletes it
- The storage account name and resource group are just placeholders. They either need to be created or the fields can be changed to match existing ones, as sketched below.
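As a minimal sketch, assuming the `particle_definition` from `example_blob_container.py`, point the example at existing resources before applying (values are placeholders):

```
particle_definition["azure_resource"]["storage_account"] = "<existing-storage-account>"
particle_definition["azure_resource"]["resource_group"] = "<existing-resource-group>"
```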
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/azure/blob | capitalone_repos/Particle-Cloud-Framework/examples/particle/azure/blob/use_blob_container/example_blob_container.py | from pcf.particle.azure.blob.blob_container import BlobContainer
from pcf.core import State
particle_definition = {
"pcf_name": "pcf_storage", # Required
"flavor": "blob", # Required
"azure_resource": {
"name": "pcf-blob", # Required
"storage_account": "wahoo", # Required
"resource_group": "hoo-resource-group", # Required
"public": True
}
}
blob = BlobContainer(particle_definition)
blob.set_desired_state(State.running)
blob.apply()
print(blob.get_state())
blob.set_desired_state(State.terminated)
blob.apply()
print(blob.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudwatch | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudwatch/use_cloudwatch_log/README.md | # What does this example do?
- This example creates a CloudWatch Log, updates it in different ways, then terminates it
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudwatch | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudwatch/use_cloudwatch_log/example_cloudwatch_log.py | from pcf.core import State
from pcf.particle.aws.cloudwatch.cloudwatch_log import CloudWatchLog
# example cloudwatch log particle
particle_definition = {
"pcf_name": "pcf_cloudwatch_log", #Required
"flavor": "logs", #Required
"aws_resource": {
# https://boto3.readthedocs.io/en/latest/reference/services/logs.html#id39
"logGroupName": "Cloud_Watch_Log_A", #Required
# "kmsKeyId": "keyA", #Must use valid key
"tags": {
# key-value pairs for tags
"removed": "tag to be removed",
"tagA": "string"
}
}
}
# create cloudwatch events particle using json
cloudwatch_log_particle = CloudWatchLog(particle_definition)
# example start
cloudwatch_log_particle.set_desired_state(State.running)
cloudwatch_log_particle.apply()
print(cloudwatch_log_particle.get_state())
print(cloudwatch_log_particle.get_current_definition())
print(cloudwatch_log_particle.get_current_state_definition())
# run again without changing anything
cloudwatch_log_particle.set_desired_state(State.running)
cloudwatch_log_particle.apply()
print(cloudwatch_log_particle.get_current_definition())
# example update
updated_def = particle_definition
updated_def["aws_resource"]["tags"]["tagA"] = "new string"
updated_def["aws_resource"]["tags"]["tagB"] = "new tag"
updated_def["aws_resource"]["tags"].pop("removed")
cloudwatch_log_particle = CloudWatchLog(updated_def)
cloudwatch_log_particle.set_desired_state(State.running)
cloudwatch_log_particle.apply()
print(cloudwatch_log_particle.get_state())
print(cloudwatch_log_particle.get_current_definition())
print(cloudwatch_log_particle.get_current_state_definition())
# example terminate
cloudwatch_log_particle.set_desired_state(State.terminated)
cloudwatch_log_particle.apply()
print(cloudwatch_log_particle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudwatch | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudwatch/use_cloudwatch_event/example_cloudwatch_event.py | from pcf.core import State
from pcf.particle.aws.cloudwatch.cloudwatch_event import CloudWatchEvent
#example cloudwatch event
cloudwatch_event_example_json = {
"pcf_name": "pcf_cloudwatch_event", #Required
"flavor": "events", #Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/events.html#CloudWatchEvents.Client.put_rule for a full list of parameters
"Name": "PCFTest", #Required
"ScheduleExpression": "rate(1 minute)", #Required
"EventPatten": "", #Required
"State": "ENABLED", #Required
"Description": "pcf cloudwatch event",
}
}
# create cloudwatch event particle using json
cloudwatch_event_particle = CloudWatchEvent(cloudwatch_event_example_json)
#example start
cloudwatch_event_particle.set_desired_state(State.running)
cloudwatch_event_particle.apply()
print(cloudwatch_event_particle.get_state())
print(cloudwatch_event_particle.get_current_state_definition())
#example update
cloudwatch_event_example_json["aws_resource"]["State"] = 'DISABLED'
cloudwatch_event_example_json["aws_resource"]["Descriptiom"] = 'pcf cloudwatch event update'
cloudwatch_event_particle = CloudWatchEvent(cloudwatch_event_example_json)
cloudwatch_event_particle.set_desired_state(State.running)
cloudwatch_event_particle.apply()
print(cloudwatch_event_particle.get_state())
print(cloudwatch_event_particle.get_current_state_definition())
#example terminate
cloudwatch_event_particle.set_desired_state(State.terminated)
cloudwatch_event_particle.apply()
print(cloudwatch_event_particle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudwatch | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudwatch/use_cloudwatch_event/README.md | # What does this example do?
- This example creates a CloudWatch Event, then terminates it
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/route53 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/route53/use_route53/example_hosted_zone.py | # Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pcf.particle.aws.route53.hosted_zone import HostedZone
from pcf.core import State
import random
import string
particle_definition = {
"pcf_name": "pcf_hosted_zone",
"flavor": "route53_hosted_zone",
"aws_resource": {
"Name": "www.hoooooos.com.",
"custom_config": {
"Tags": [
{
"Key": "Owner",
"Value": "Hoo"
}
]
},
"VPC": {
"VPCRegion": "us-east-1",
"VPCId": "vpc-12345"
},
"CallerReference": ''.join(random.choices(string.ascii_uppercase + string.digits, k=20)),
"HostedZoneConfig": {
"Comment": "hoo",
"PrivateZone": True
},
# "DelegationSetId": ""
}
}
hosted_zone = HostedZone(particle_definition)
# example start
hosted_zone.set_desired_state(State.running)
hosted_zone.apply()
print(hosted_zone.get_state())
print(hosted_zone.get_current_state_definition())
# example Terminate
hosted_zone.set_desired_state(State.terminated)
hosted_zone.apply()
print(hosted_zone.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/route53 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/route53/use_route53/README.md | # What does this example do?
- This example creates a Route53 record, updates it, then terminates it
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/route53 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/route53/use_route53/example_route53_record.py | from pcf.particle.aws.route53.route53_record import Route53Record
from pcf.core import State
# Edit example json to work in your account
route53_record_example_json = {
"pcf_name": "route_53", # Required
"flavor":"route53_record", # Required
"aws_resource":{
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/route53.html#Route53.Client.change_resource_record_sets for full list of parameters
"Name":"test.test-name.com.", # Required
"HostedZoneId":"HostedZoneId", # Required
"TTL":333,
"ResourceRecords":[{"Value":"1.1.1.1"}], # Required
"Type":"A" # Required
}
}
# create route53 record particle
route53 = Route53Record(route53_record_example_json)
# example start
route53.set_desired_state(State.running)
route53.apply(cascade=True)
print(route53.get_state())
# example update
updated_def = route53_record_example_json
updated_def["aws_resource"]["ResourceRecords"] = [{"Value":"2.3.4.5"}]
route53 = Route53Record(updated_def)
route53.set_desired_state(State.running)
route53.apply(cascade=True)
print(route53.get_state())
print(route53.get_current_state_definition())
# example terminate
route53.set_desired_state(State.terminated)
route53.apply(cascade=True)
print(route53.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/s3 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/s3/use_s3/example_s3_bucket.py | from pcf.particle.aws.s3.s3_bucket import S3Bucket
from pcf.core import State
import sys
import os
# Edit example json to work in your account
s3_bucket_example_json = {
"pcf_name": "pcf_s3_bucket", # Required
"flavor":"s3_bucket", # Required
"aws_resource":{
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/s3.html#S3.Client.create_bucket for full list of parameters
"Bucket":"pcf-test", # Required
"custom_config": {
"Tags":{
"Name": "pcf-s3-example"
}
}
}
}
# create S3 Bucket particle
s3 = S3Bucket(s3_bucket_example_json)
# example start
s3.set_desired_state(State.running)
s3.apply()
print(s3.get_state())
# example put object
some_binary_data = b'Here we have some data'
print(s3.client.put_object(Bucket=s3.bucket_name, Key="test-object",Body=some_binary_data))
s3.resource.Bucket(s3.bucket_name).upload_file(Key="test-file", Filename=os.path.join(sys.path[0],"test.txt"))
# example get object
file_body = s3.client.get_object(Bucket=s3.bucket_name, Key="test-file")['Body']
for line in file_body._raw_stream:
print(line)
# example get tags
print(s3.client.get_bucket_tagging(Bucket=s3.bucket_name).get("TagSet"))
# example terminate
s3.client.delete_object(Bucket=s3.bucket_name, Key="test-object")
s3.client.delete_object(Bucket=s3.bucket_name, Key="test-file")
s3.set_desired_state(State.terminated)
s3.apply()
print(s3.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/s3 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/s3/use_s3/README.md | # What does this example do?
- This example creates an S3 bucket, uploads an object to the bucket, reads the object back, deletes the object, then terminates the bucket
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/s3 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/s3/use_s3/test.txt | test text file
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/emr | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/emr/use_emr_cluster/use_emr_cluster.py | from pcf.particle.aws.emr.emr_cluster import EMRCluster
from pcf.core import State
# Edit example json to work in your account
particle_definition_instance_group ={
"pcf_name":"emr",
"flavor":"emr_cluster",
"aws_resource": {
"ReleaseLabel":"emr-5.19.0",
"Instances":{
"KeepJobFlowAliveWhenNoSteps":True,
"InstanceGroups":[{
"InstanceRole":"MASTER",
"Name":"master",
"InstanceCount":1,
"InstanceType":'m3.xlarge',
}]
},
"JobFlowRole":"EMR_EC2_DefaultRole",
"Name":"test",
"ServiceRole":"EMR_DefaultRole"
}
}
particle_definition_instance_fleet = {
"pcf_name": "pcf_cluster",
"flavor": "emr_cluster",
"aws_resource": {
"ReleaseLabel":"emr-5.19.0",
"Instances":{
"KeepJobFlowAliveWhenNoSteps":True,
"InstanceFleets":[{
"InstanceFleetType":"MASTER",
"Name":"master",
"TargetOnDemandCapacity":1,
"InstanceTypeConfigs": [{
"InstanceType":'m3.xlarge',
}]
}]
},
"JobFlowRole":"EMR_EC2_DefaultRole",
"Name":"test",
"ServiceRole":"EMR_DefaultRole"
}
}
# Input desired EMR config
emr = EMRCluster(particle_definition_instance_group)
# Start
emr.set_desired_state(State.running)
emr.apply()
print(emr.state)
print(emr.current_state_definition)
# Example Update (only updates non master instance counts, TargetOnDemandCapacity, or TargetSpotCapacity)
# particle_definition_instance_group["aws_resource"]["Instances"]["InstanceGroups"][0]["InstanceCount"] = 2
# particle_definition_instance_fleet["aws_resource"]["Instances"]["InstanceFleets"][0]["TargetOnDemandCapacity"] = 2
# emr = EMRCluster(particle_definition_instance_group)
# emr.set_desired_state(State.running)
# emr.apply()
# print(emr.state)
# print(emr.current_state_definition)
# Terminate
emr.set_desired_state(State.terminated)
emr.apply()
print(emr.state)
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/emr | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/emr/use_emr_cluster/README.md | # What does this example do?
- This example spins up an EMR Cluster and terminates it. It contains sample configs for instance groups and instance fleets; an update sketch follows below.
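As a minimal sketch of the update path (mirroring the commented-out block in `use_emr_cluster.py`, which only changes non-master instance counts, `TargetOnDemandCapacity`, or `TargetSpotCapacity`), assuming the instance-group definition from that script:

```
particle_definition_instance_group["aws_resource"]["Instances"]["InstanceGroups"][0]["InstanceCount"] = 2
emr = EMRCluster(particle_definition_instance_group)
emr.set_desired_state(State.running)
emr.apply()
```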
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs/use_ecs_service/example_ecs_service.py | from pcf.particle.aws.ecs.ecs_cluster import ECSCluster
from pcf.particle.aws.ecs.ecs_task_definition import ECSTaskDefinition
from pcf.particle.aws.ecs.ecs_service import ECSService
from pcf.core import State
from pcf.core.pcf import PCF
# Example ECS Cluster config json
# ECS Cluster is a required parent for ECS Service
ecs_cluster_example_json = {
"pcf_name": "pcf_ecs_cluster", # Required
"flavor": "ecs_cluster", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ecs.html#ECS.Client.create_cluster for full list of parameters
"clusterName": "pcf_example" # Required
}
}
# Setup required parent ecs_cluster particle using a sample configuration
ecs_cluster = ECSCluster(ecs_cluster_example_json)
# Example ECS Task Definition config json
# ECS Task Definition is a required parent for ECS Service
ecs_task_def_example_json = {
"pcf_name": "task-def", # Required
"flavor": "ecs_task_definition", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ecs.html#ECS.Client.register_task_definition for full list of available properties
"family": "pcf-ecs-task-def-example", # Required
"containerDefinitions": [ # Required
{
"name": "pcf-ecs-task-def-example", # Required
"memory": 60000,
"cpu": 3800,
"essential": True,
"privileged": True,
"image": "debian:jessie", # Required
"portMappings": [
{
"hostPort": 0,
"containerPort": 8000,
"protocol": "tcp"
}
],
"mountPoints": [
{
"containerPath": "/usr/local/folder",
"sourceVolume": "myfolder",
"readOnly": True
}
],
"environment": [
{
"name": "http_proxy",
"value": "http://proxy.mycompany.com:8080"},
{
"name": "https_proxy",
"value": "http://proxy.mycompany.com:8080"
},
{
"name": "no_proxy",
"value": "localhost,127.0.0.1,169.254.169.254,169.254.170.2,.mycompany.com"
}
],
}
],
"volumes": [
{
"host": {
"sourcePath": "/var/lib/somefolder/"
},
"name": "myfolder"
}
]
}
}
# Setup required parent ecs_task_definition particle using a sample configuration
ecs_task_def = ECSTaskDefinition(ecs_task_def_example_json)
# example ECS Service config json
ecs_service_example_json = {
"pcf_name": 'pcf_ecs_service',
"flavor": "ecs_service",
"parents": [
ecs_cluster.get_pcf_id(), # Required. This replaces Cluster in aws_resource
ecs_task_def.get_pcf_id() # Required. This replaces taskDefinition in aws_resource
],
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ecs.html#ECS.Client.create_service for a full list of parameters
"serviceName": "pcf_ecs_service_example",
"desiredCount": 1
}
}
# Setup ecs_service particle using a sample configuration
ecs_service = ECSService(ecs_service_example_json)
pcf = PCF([])
pcf.add_particles((
ecs_cluster,
ecs_service,
ecs_task_def,
))
pcf.link_particles(pcf.particles)
pcf.apply(sync=True, cascade=True)
# example start
ecs_cluster.set_desired_state(State.running)
ecs_task_def.set_desired_state(State.running)
ecs_service.set_desired_state(State.running)
pcf.apply(sync=True, cascade=True)
print(ecs_service.get_state())
print(ecs_service.get_current_state_definition())
# example terminate
ecs_cluster.set_desired_state(State.terminated)
ecs_task_def.set_desired_state(State.terminated)
ecs_service.set_desired_state(State.terminated)
pcf.apply(sync=True, cascade=True)
print(ecs_service.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs/use_ecs_service/README.md | # What does this example do?
- This example creates the required parents, an ECS Cluster and an ECS Task Definition, then creates an ECS Service, then terminates all 3 resources
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs/use_ecs_cluster/example_ecs_cluster.py | from pcf.particle.aws.ecs.ecs_cluster import ECSCluster
from pcf.core import State
# example ECS Cluster config json
ecs_cluster_example_json = {
"pcf_name": "pcf_ecs_cluster", # Required
"flavor": "ecs_cluster", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ecs.html#ECS.Client.create_cluster for full list of parameters
"clusterName": "pcf_example" # Required
}
}
# Setup ecs_cluster particle using a sample configuration
ecs_cluster = ECSCluster(ecs_cluster_example_json)
# example start
ecs_cluster.set_desired_state(State.running)
ecs_cluster.apply()
print(ecs_cluster.get_state())
print(ecs_cluster.get_current_state_definition())
# example terminate
ecs_cluster.set_desired_state(State.terminated)
ecs_cluster.apply()
print(ecs_cluster.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs/use_ecs_cluster/README.md | # What does this example do?
- This example creates an Elastic Container Service (ECS) Cluster, then terminates the ECS Cluster
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs/use_ecs_task_definiton/README.md | # What does this example do?
- This example creates a sample ECS task definition using a Docker image from Docker Hub
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs/use_ecs_task_definiton/example_ecs_task_definition.py | from pcf.particle.aws.ecs.ecs_task import ECSTaskDefinition
from pcf.core import State
# example ECS Task Definition config json.
ecs_task_def_example_json = {
"pcf_name": "task-def", # Required
"flavor": "ecs_task_definition", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ecs.html#ECS.Client.register_task_definition for full list of available properties
"family": "pcf-ecs-task-def-example", # Required
"containerDefinitions": [ # Required
{
"name": "pcf-ecs-task-def-example", # Required
"memory": 60000,
"cpu": 3800,
"essential": True,
"privileged": True,
"image": "debian:jessie", # Required
"portMappings": [
{
"hostPort": 0,
"containerPort": 8000,
"protocol": "tcp"
}
],
"mountPoints": [
{
"containerPath": "/usr/local/folder",
"sourceVolume": "myfolder",
"readOnly": True
}
],
"environment": [
{
"name": "http_proxy",
"value": "http://proxy.mycompany.com:8080"},
{
"name": "https_proxy",
"value": "http://proxy.mycompany.com:8080"
},
{
"name": "no_proxy",
"value": "localhost,127.0.0.1,169.254.169.254,169.254.170.2,.mycompany.com"
}
],
}
],
"volumes": [
{
"host": {
"sourcePath": "/var/lib/somefolder/"
},
"name": "myfolder"
}
]
}
}
# Setup ecs_task_definition particle using a sample configuration
ecs_task_definition = ECSTaskDefinition(ecs_task_def_example_json)
# example start
ecs_task_definition.set_desired_state(State.running)
ecs_task_definition.apply()
print(ecs_task_definition.get_state())
print(ecs_task_definition.get_current_state_definition())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs/use_ecs_instance/example_ecs_instance.py | from pcf.particle.aws.ecs.ecs_cluster import ECSCluster
from pcf.particle.aws.ec2.ec2_instance import EC2Instance
from pcf.particle.aws.ecs.ecs_instance import ECSInstance
from pcf.core import State
from pcf.core.pcf import PCF
# Example ECS Cluster config json
# ECS Cluster is a required parent for ECS Instance
ecs_cluster_example_json = {
"pcf_name": "pcf_ecs_cluster", # Required
"flavor": "ecs_cluster", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ecs.html#ECS.Client.create_cluster for full list of parameters
"clusterName": "pcf_example" # Required
}
}
# Setup required parent ecs_cluster particle using a sample configuration
ecs_cluster = ECSCluster(ecs_cluster_example_json)
# Example EC2 Instance config json
ec2_instance_example_json = {
"pcf_name": "ec2-example", # Required
"flavor": "ec2_instance", # Required
"aws_resource": {
"custom_config": {
"instance_name": "my-instance", # Required
"userdata_iamparams": {
"ENVIRONMENT_VARIABLES": [
"PROXY=http://proxy.mycompany.com:8080",
"HTTP_PROXY=$PROXY",
"HTTPS_PROXY=$PROXY",
"http_proxy=$PROXY",
"https_proxy=$PROXY",
"NO_PROXY=169.254.169.254,.mycompany.com,127.0.0.1,localhost,$(curl -s http://169.254.169.254/latest/meta-data/local-ipv4/)",
"no_proxy=$NO_PROXY"
]
},
"tags": {
"Name": "pcf-ec2-example"
}
},
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#EC2.ServiceResource.create_instances for a full list of parameters
"ImageId": "ami-11111111", # Required
"InstanceType": "t2.nano",
"KeyName": "my-key",
"MaxCount": 1,
"MinCount": 1,
"SecurityGroupIds": [
"sg-11111111",
"sg-22222222"
],
"SubnetId": "subnet-11111111", # Required
"IamInstanceProfile": {
"Arn": "arn:aws:iam::111111111111:instance-profile/AAAAAAAAAA"
},
"BlockDeviceMappings": [ # Required
{
"DeviceName": "/dev/sda1", # DeviceName changes for different Linux distro
"Ebs": {
"DeleteOnTermination": True,
"VolumeSize": 20,
"VolumeType": "gp2"
}
}
]
}
}
# Setup required parent ec2_instance particle using a sample configuration
ec2_instance = EC2Instance(ec2_instance_example_json)
# Example ECS Instance config json
ecs_instance_example_json = {
"pcf_name": "pcf_ecs_instance", # Required
"flavor": "ecs_instance", # Required
"parents": [
ecs_cluster.get_pcf_id(), # Required. This replaces Cluster in aws_resource
ec2_instance.get_pcf_id(), # Required. This replaces EC2 Instance ID in aws_resource
],
"aws_resource": {
}
}
# Setup ecs_instance particle using a sample configuration
ecs_instance = ECSInstance(ecs_instance_example_json)
pcf = PCF([])
pcf.add_particles((
ecs_cluster,
ec2_instance,
ecs_instance,
))
pcf.link_particles(pcf.particles)
pcf.apply(sync=True, cascade=True)
# example start
ecs_cluster.set_desired_state(State.running)
ec2_instance.set_desired_state(State.running)
ecs_instance.set_desired_state(State.running)
pcf.apply(sync=True, cascade=True)
print(ecs_instance.get_state())
print(ecs_instance.get_current_state_definition())
# example terminate
ecs_cluster.set_desired_state(State.terminated)
ec2_instance.set_desired_state(State.terminated)
ecs_instance.set_desired_state(State.terminated)
pcf.apply(sync=True, cascade=True)
print(ecs_instance.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs/use_ecs_instance/example_userdata.sh.j2 | #!/bin/bash
# Export environment variables
{% for ENVIRONMENT_VARIABLE in ENVIRONMENT_VARIABLES %}
export {{ENVIRONMENT_VARIABLE}}
{% endfor %}
# Start the ECS Agent on Amazon Linux AMI.
# Refer to this amazon-ecs-agent repo to learn how to start ecs agent in other Linux distros - https://github.com/aws/amazon-ecs-agent
sudo yum install -y ecs-init && sudo start ecs
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs/use_ecs_instance/README.md | # What does this example do?
- This example creates the required parent particles (an ECS Cluster and an EC2 instance), adds the EC2 instance to the newly created ECS Cluster as an ECS Instance, and then terminates all three resources; the parent linkage is sketched below
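- The ECS Instance particle pulls its cluster and EC2 instance from `parents` rather than from `aws_resource`; a minimal sketch of that linkage, taken from `example_ecs_instance.py`:

```python
ecs_instance_example_json = {
    "pcf_name": "pcf_ecs_instance",
    "flavor": "ecs_instance",
    "parents": [
        ecs_cluster.get_pcf_id(),   # replaces Cluster in aws_resource
        ec2_instance.get_pcf_id(),  # replaces the EC2 instance ID in aws_resource
    ],
    "aws_resource": {}
}
```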
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs/use_ecs_task/README.md | # What does this example do?
- The ECS Task particle requires a task definition and an ECS cluster, either included in the initial state definition or supplied as parent particles (see the sketch below).
- This example creates the required parents (an ECS Cluster and an ECS Task Definition), creates the ECS Task, and then terminates all three resources
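- A minimal sketch of the parent wiring, mirroring `example_ecs_task.py`:

```python
ecs_task_example_json = {
    "pcf_name": "pcf_ecs_task",
    "flavor": "ecs_task",
    "parents": [
        ecs_cluster.get_pcf_id(),   # replaces Cluster in aws_resource
        ecs_task_def.get_pcf_id(),  # replaces taskDefinition in aws_resource
    ],
    "aws_resource": {"launchType": "EC2"}
}
```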
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ecs/use_ecs_task/example_ecs_task.py | from pcf.particle.aws.ecs.ecs_cluster import ECSCluster
from pcf.particle.aws.ecs.ecs_task_definition import ECSTaskDefinition
from pcf.particle.aws.ecs.ecs_task import ECSTask
from pcf.core import State
from pcf.core.pcf import PCF
# Example ECS Cluster config json
# ECS Cluster is a required parent for ECS Task
ecs_cluster_example_json = {
"pcf_name": "pcf_ecs_cluster", # Required
"flavor": "ecs_cluster", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ecs.html#ECS.Client.create_cluster for full list of parameters
"clusterName": "pcf_example" # Required
}
}
# Setup required parent ecs_cluster particle using a sample configuration
ecs_cluster = ECSCluster(ecs_cluster_example_json)
# Example ECS Task Definition config json
# ECS Task Definition is a required parent for ECS Task
ecs_task_def_example_json = {
"pcf_name": "task-def", # Required
"flavor": "ecs_task_definition", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ecs.html#ECS.Client.register_task_definition for full list of available properties
"family": "pcf-ecs-task-def-example", # Required
"containerDefinitions": [ # Required
{
"name": "pcf-ecs-task-def-example", # Required
"memory": 60000,
"cpu": 3800,
"essential": True,
"privileged": True,
"image": "debian:jessie", # Required
"portMappings": [
{
"hostPort": 0,
"containerPort": 8000,
"protocol": "tcp"
}
],
"mountPoints": [
{
"containerPath": "/usr/local/folder",
"sourceVolume": "myfolder",
"readOnly": True
}
],
"environment": [
{
"name": "http_proxy",
"value": "http://proxy.mycompany.com:8080"},
{
"name": "https_proxy",
"value": "http://proxy.mycompany.com:8080"
},
{
"name": "no_proxy",
"value": "localhost,127.0.0.1,169.254.169.254,169.254.170.2,.mycompany.com"
}
],
}
],
"volumes": [
{
"host": {
"sourcePath": "/var/lib/somefolder/"
},
"name": "myfolder"
}
]
}
}
# Setup required parent ecs_task_definition particle using a sample configuration
ecs_task_def = ECSTaskDefinition(ecs_task_def_example_json)
# Example ECS Task config json
ecs_task_example_json = {
    "pcf_name": "pcf_ecs_task", # Required
    "flavor": "ecs_task", # Required
"parents": [
ecs_cluster.get_pcf_id(), # Required. This replaces Cluster in aws_resource
ecs_task_def.get_pcf_id() # Required. This replaces taskDefinition in aws_resource
],
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ecs.html#ECS.Client.run_task for a full list of parameters
#"count": 1, # Required
"launchType": "EC2"
}
}
# Setup ecs_task particle using a sample configuration
ecs_task = ECSTask(ecs_task_example_json)
pcf = PCF([])
pcf.add_particles((
ecs_cluster,
ecs_task_def,
ecs_task,
))
pcf.link_particles(pcf.particles)
pcf.apply(sync=True, cascade=True)
# example start
ecs_cluster.set_desired_state(State.running)
ecs_task_def.set_desired_state(State.running)
ecs_task.set_desired_state(State.running)
pcf.apply(sync=True, cascade=True)
print(ecs_task.get_state())
print(ecs_task.get_current_state_definition())
# example terminate
# ecs_cluster.set_desired_state(State.terminated)
# ecs_task_def.set_desired_state(State.terminated)
# ecs_task.set_desired_state(State.terminated)
# pcf.apply(sync=True, cascade=True)
# print(ecs_task.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudformation | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudformation/use_cloudformation/example_cloudformation_parameterized_template.py | # Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from pcf.particle.aws.cloudformation.cloudformation_stack import CloudFormationStack
from pcf.core import State
template_file_location = os.path.join(sys.path[0], "example_cloudformation.yml")
with open(template_file_location, "r") as content_file:
content = content_file.read()
# Only required fields are included
particle_definition = {
"pcf_name": "pcf_cloudformation",
"flavor": "cloudformation",
"aws_resource": {
"custom_config": {
"template_parameters": {
"var1": "variable one",
"var2": "variable two"
}
},
"StackName": "pcf-cloudformation",
"Tags": [
{
"Key": "Name",
"Value": "test"
}
],
"TemplateBody": content,
}
}
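# The keys under custom_config.template_parameters map to the jinja placeholders
# ({{ var1 }} / {{ var2 }}) in example_cloudformation.yml.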
particle = CloudFormationStack(particle_definition)
particle.set_desired_state(State.running)
particle.apply(sync=True)
print(particle.get_state())
## Example update: adding additional tags to the resource. Updating the cloudformation template also triggers an update.
particle_definition['aws_resource']['Tags'].append({"Key": "Name2", "Value": "test2"})
particle.set_desired_state(State.running)
particle.apply(sync=True)
particle.set_desired_state(State.terminated)
particle.apply(sync=True)
print(particle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudformation | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudformation/use_cloudformation/example_cloudformation.py | # Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from pcf.particle.aws.cloudformation.cloudformation_stack import CloudFormationStack
from pcf.core import State
template_file_location = os.path.join(sys.path[0], "example_cloudformation.yml")
with open(template_file_location, "r") as content_file:
content = content_file.read()
## You can also pass in your cloudformation template configuration as JSON
# example_cloudformation_template = {
# "AWSTemplateFormatVersion":"2010-09-09",
# "Description":"Example Project",
# "Resources":{
# "TestKinesis":{
# "Type":"AWS::Kinesis::Stream",
# "Properties":{
# "Name":"KinesisStreamCloudwatch",
# "ShardCount":1,
# "StreamEncryption":{
# "EncryptionType":"KMS",
# "KeyId":"alias/aws/kinesis"
# },
# "Tags":[
# {
# "Key":"Test1",
# "Value":"Test2"
# }
# ]
# }
# }
# }
# }
# content = json.dumps(example_cloudformation_template)
# Only required fields are included
particle_definition = {
"pcf_name": "pcf_cloudformation",
"flavor": "cloudformation",
"aws_resource": {
"StackName": "pcf-cloudformation",
"Tags": [
{
"Key": "Name",
"Value": "test"
}
],
"TemplateBody": content,
}
}
particle = CloudFormationStack(particle_definition)
particle.set_desired_state(State.running)
particle.apply(sync=True)
print(particle.get_state())
## Example update: adding additional tags to the resource. Updating the cloudformation template also triggers an update.
particle_definition['aws_resource']['Tags'].append({"Key": "Name2", "Value": "test2"})
particle.set_desired_state(State.running)
particle.apply(sync=True)
particle.set_desired_state(State.terminated)
particle.apply(sync=True)
print(particle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudformation | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/cloudformation/use_cloudformation/example_cloudformation.yml | AWSTemplateFormatVersion: 2010-09-09
Description: 'Example Project'
Resources:
TestKinesis:
Type: AWS::Kinesis::Stream
Properties:
Name: "TestKinesisStreamCloudwatch"
#RetentionPeriodHours: Default 24 hours
ShardCount: 1
StreamEncryption:
EncryptionType: "KMS"
KeyId: "alias/aws/kinesis"
Tags:
- Key: Test
Value: Test2
- Key: {{ var1 }}
Value: {{ var2 }}
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/rds | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/rds/use_rds/example_rds.py | from pcf.particle.aws.rds.rds_instance import RDS
from pcf.core import State
particle_definition = {
"pcf_name": "rds_test", # Required
"flavor": "rds_instance", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/rds.html#RDS.Client.create_db_instance for a full list of parameters
"DBInstanceIdentifier": "test-instance", # Required
"DBInstanceClass": "db.m3.medium", # Required
"Engine": "postgres", # Required
"MasterUsername": "postgres",
"DBName": "myDB",
"AllocatedStorage":10,
"BackupRetentionPeriod":30,
"AvailabilityZone":"us-east-1c",
"EngineVersion":"9.6.2",
"StorageEncrypted":True,
"KmsKeyId":"KMS-ID",
"Port":5432,
"DBSubnetGroupName":"subnetgroupname",
"VpcSecurityGroupIds":[
"sg-11111111"
],
"MasterUserPassword":"supersecret",
"ApplyImmediately": True,
"SkipFinalSnapshot": True,
"DBParameterGroupName": "isrm-postgres96",
"Tags": [
{
"Key": "email",
"Value": "your.email@example.com"
},
{
"Key": "name",
"Value": "John Doe"
}
        ]
}
}
rds = RDS(particle_definition)
status = rds.get_status()
# Example for creating RDS instance
rds.set_desired_state(State.running)
rds.apply(sync=False)
# Example for updating RDS instance. RDS updates happen during the maintenance window unless the 'ApplyImmediately' field is set to true.
particle_definition['aws_resource']['EngineVersion'] = "9.6.3"
rds = RDS(particle_definition)
rds.set_desired_state(State.running)
rds.apply(sync=False)
# Example for deleting RDS instance
rds.set_desired_state(State.terminated)
rds.apply(sync=False)
print(rds.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/rds | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/rds/use_rds/README.md | # What does this example do?
- This example creates an RDS instance, updates the instance (see the sketch below), then terminates it
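- The update is a mutate-and-re-apply, taken from `example_rds.py` (changes wait for the maintenance window unless `ApplyImmediately` is true):

```python
particle_definition['aws_resource']['EngineVersion'] = "9.6.3"
rds = RDS(particle_definition)
rds.set_desired_state(State.running)
rds.apply(sync=False)
```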
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/batch | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/batch/use_batch_compute_environment/example_batch_compute_environment.py | # Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pcf.particle.aws.batch.batch_compute_environment import BatchComputeEnvironment
from pcf.core import State
# Only required fields are included. For all fields, see
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/batch.html#Batch.Client.create_compute_environment
batch_def = {
"pcf_name": "pcf-example",
"flavor": "batch_compute_environment",
"aws_resource": {
"computeEnvironmentName": "test",
"type": "UNMANAGED",
"serviceRole": "AWSBatchServiceRole"
}
}
particle = BatchComputeEnvironment(batch_def)
particle.set_desired_state("running")
particle.apply()
print(particle.current_state_definition)
print(particle.state)
particle.set_desired_state("terminated")
particle.apply(sync=True)
print(particle.state)
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/batch | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/batch/use_batch_compute_environment/README.md | # What does this example do?
- This example creates a Batch Compute Environment, then terminates it
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/batch | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/batch/use_batch_job_queue/example_batch_job_queue.py | # Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pcf.particle.aws.batch.batch_job_queue import BatchJobQueue
from pcf.core import State
# Only required fields are included. For all fields, see
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/batch.html#Batch.Client.create_job_queue
batch_def = {
"pcf_name": "pcf-example",
"flavor": "batch_job_queue",
"aws_resource": {
"jobQueueName": "test",
"state":"ENABLED",
"priority":1,
"computeEnvironmentOrder": [
{
'order': 1,
'computeEnvironment': "COMPUTE_ENVIRONMENT_ARN" # replace
},
]
}
}
particle = BatchJobQueue(batch_def)
particle.set_desired_state("running")
particle.apply()
print(particle.current_state_definition)
print(particle.state)
particle.set_desired_state("terminated")
particle.apply(sync=True)
print(particle.state)
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/batch | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/batch/use_batch_job_queue/README.md | # What does this example do?
- This example creates a Batch Job Queue, then terminates it
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/lambda_function | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/lambda_function/use_lambda_function/example_lambda.py | from pcf.core import State
from pcf.particle.aws.lambda_function.lambda_function import LambdaFunction
# Edit example json to work in your account
# example lambda with function in local zip file
lambda_function_example_zip_json = {
"pcf_name": "lambda_test", # Required
"flavor": "lambda_function", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/lambda.html#Lambda.Client.create_function for a full list of parameters
"FunctionName": "PCFTest", # Required
"Runtime": "python3.6", # Required
"Timeout":30,
"Role": "arn:aws:iam::account-id:role/lambda-role", # Required
"Handler": "function_trigger.trigger_handler", # Required
"Code":{"ZipFile": "lambda_function.zip"} # Required
}
}
# example lambda with function in a zip file in s3
lambda_function_example_s3_json = {
"pcf_name": "lambda_test", # Required
"flavor": "lambda_function", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/lambda.html#Lambda.Client.create_function for a full list of parameters
"FunctionName": "PCFTest", # Required
"Runtime": "python3.6", # Required
"Timeout": 50,
"Role": "arn:aws:iam::account-id:role/lambda-role", # Required
"Handler": "function_trigger.trigger_handler", # Required
"Code": {"S3Bucket": "pcf-lambda","S3Key": "lambda_function.zip"},
"Environment": {"Variables":{"test": "letsgo"}}
}
}
# create lambda particle using local zip file or s3
lambda_function_particle = LambdaFunction(lambda_function_example_zip_json)
# lambda_function_particle = LambdaFunction(lambda_function_example_s3_json)
# example start
lambda_function_particle.set_desired_state(State.running)
lambda_function_particle.apply()
print(lambda_function_particle.get_state())
# example update
updated_def = lambda_function_example_zip_json
updated_def["aws_resource"]["Timeout"] = 40
lambda_function_particle = LambdaFunction(updated_def)
lambda_function_particle.set_desired_state(State.running)
lambda_function_particle.apply()
print(lambda_function_particle.get_state())
print(lambda_function_particle.get_current_state_definition())
# example terminate
lambda_function_particle.set_desired_state(State.terminated)
lambda_function_particle.apply()
print(lambda_function_particle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/lambda_function | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/lambda_function/use_lambda_function/README.md | # What does this example do?
- This example creates a Lambda function from a local zip file, then terminates it. You must supply the Lambda zip file yourself.
  It also contains an example of creating a Lambda function from a zip file in S3; the two `Code` variants are sketched below.
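- The only difference between the two configurations is the `Code` field; a minimal sketch of both variants, taken from `example_lambda.py` (these dicts go under `aws_resource["Code"]`):

```python
# function code in a local zip file
code_local = {"ZipFile": "lambda_function.zip"}
# function code already staged in S3
code_s3 = {"S3Bucket": "pcf-lambda", "S3Key": "lambda_function.zip"}
```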
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_callbacks/example_userdata.sh.j2 | # Export environment variables
{% for ENVIRONMENT_VARIABLE in ENVIRONMENT_VARIABLES %}
export {{ENVIRONMENT_VARIABLE}}
{% endfor %}
# Run your code here
echo {{VAR1}}
echo {{VAR2}}
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_callbacks/README.md | # What does this example do?
- This example shows how you can trigger custom callback functions after various state changes (start, stop, terminate); the callback wiring is sketched below
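- The callbacks are declared in the particle definition itself; a minimal sketch of the wiring, mirroring `example_callbacks.py`:

```python
def example_start_callback():
    print("callback triggered after start")

def example_terminate_callback(text):
    print(text)

ec2_instance_example_json["callbacks"] = {
    "start": {"function": example_start_callback},
    "terminate": {"function": example_terminate_callback, "kwargs": {"text": "terminate"}},
}
```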
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_callbacks/example_callbacks.py | from pcf.particle.aws.ec2.ec2_instance import EC2Instance
from pcf.core import State
# Edit example json to work in your account
def example_start_callback():
print("callback triggered after start")
def example_terminate_callback(text):
print(text)
# example ec2 instance json
ec2_instance_example_json = {
"pcf_name": "ec2-example", # Required
"flavor": "ec2_instance", # Required
"callbacks": {
"start": {"function": example_start_callback},
"terminate": {"function": example_terminate_callback, "kwargs": {"text": "terminate"}}
},
"aws_resource": {
"custom_config": {
"instance_name": "my-instance", # Required
"userdata_params": {
"ENVIRONMENT_VARIABLES": [
"PROXY=http://proxy.mycompany.com:8080",
"ABC=123"
],
"VAR1": "hello",
"VAR2": "world"
},
"userdata_wait": True,
"userdata_bash": True,
"tags": {
"Name": "pcf-ec2-example",
"Tag1": "hello",
"Tag2": "world",
}
},
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#EC2.ServiceResource.create_instances for a full list of parameters
"ImageId": "ami-11111111", # Required
"InstanceType": "t2.nano",
"KeyName": "my-key",
"MaxCount": 1,
"MinCount": 1,
"SecurityGroupIds": [
"sg-11111111",
"sg-22222222"
],
"SubnetId": "subnet-11111111", # Required
"IamInstanceProfile": {
"Arn": "arn:aws:iam::111111111111:instance-profile/someRole"
},
"BlockDeviceMappings": [ # Required
{
"DeviceName": "/dev/sda1", # DeviceName changes for different Linux distro
"Ebs": {
"DeleteOnTermination": True,
"VolumeSize": 20,
"VolumeType": "gp2"
}
}
]
}
}
# Setup ec2_instance particle using a sample configuration
ec2_instance_particle = EC2Instance(ec2_instance_example_json)
# example start
ec2_instance_particle.set_desired_state(State.running)
ec2_instance_particle.apply()
print(ec2_instance_particle.get_state())
print(ec2_instance_particle.get_current_state_definition())
# example terminate
ec2_instance_particle.set_desired_state(State.terminated)
ec2_instance_particle.apply()
print(ec2_instance_particle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_ebs_volume/example_ebs_volume.py | from pcf.particle.aws.ec2.ebs_volume import EBSVolume
from pcf.core import State
# example ec2 instance json
ebs_volume_example_json = {
"pcf_name": "ebs-example", # Required
"flavor": "ebs_volume", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#EC2.ServiceResource.create_volume for a full list of parameters
'AvailabilityZone': 'us-east-1a', # Required
'Encrypted': True,
'Size': 20, # Required
'SnapshotId': "snap-11111111111111111",
'VolumeType': 'gp2', # 'standard'|'io1'|'gp2'|'sc1'|'st1' # Required
'Tags': {
'Name': "pcf-ebs-example", # Required
"Tag1": "hello",
"Tag2": "world"
},
}
}
# Setup ebs volume particle using a sample configuration
ebs = EBSVolume(ebs_volume_example_json)
# example start
ebs.desired_state = State.running
ebs.apply()
print('volume:', ebs.volume_id)
print('state:', ebs.state)
print('definition:', ebs.current_state_definition)
ebs_volume_example_json['aws_resource']['Tags'].pop('Tag1')
ebs_volume_example_json['aws_resource']['Tags']['Tag2'] = 'bye'
ebs_volume_example_json['aws_resource']['Size'] = 30
ebs_volume_example_json['aws_resource']['VolumeId'] = ebs.volume_id
# example update
updated_ebs = EBSVolume(ebs_volume_example_json)
updated_ebs.desired_state = State.running
updated_ebs.apply()
print('volume:', updated_ebs.volume_id)
print('state:', updated_ebs.state)
print('definition:', updated_ebs.current_state_definition)
# example terminate
updated_ebs.desired_state = State.terminated
updated_ebs.apply()
print('volume (%s) terminated' % updated_ebs.volume_id)
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_ebs_volume/README.md | # What does this example do?
- This example spins up an EBS volume, updates the volume (see the sketch below), then terminates it
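- The update targets the existing volume by passing its `VolumeId` back into the definition, as in `example_ebs_volume.py`:

```python
ebs_volume_example_json['aws_resource']['Size'] = 30
ebs_volume_example_json['aws_resource']['VolumeId'] = ebs.volume_id  # target the existing volume
updated_ebs = EBSVolume(ebs_volume_example_json)
updated_ebs.desired_state = State.running
updated_ebs.apply()
```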
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_validate_config/example_userdata.sh.j2 | # Export environment variables
{% for ENVIRONMENT_VARIABLE in ENVIRONMENT_VARIABLES %}
export {{ENVIRONMENT_VARIABLE}}
{% endfor %}
# Run your code here
echo {{VAR1}}
echo {{VAR2}}
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_validate_config/README.md | # What does this example do?
- This example adds a custom config validation rule to an EC2 instance particle; applying with `validate_config=True` raises `InvalidConfigException` when the config is invalid (the validation hook is sketched below)
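- The hook is an override of `_validate_config` on a subclass, as in `example_ec2_instance.py`:

```python
class CustomEC2Instance(EC2Instance):
    flavor = "custom_ec2_instance"

    def _validate_config(self):
        # raise when a required custom field is missing
        if self.desired_state_definition.get("custom_config").get("tags").get("Tag1") is None:
            raise InvalidConfigException
```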
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_validate_config/example_ec2_instance.py | from pcf.particle.aws.ec2.ec2_instance import EC2Instance
from pcf.core import State
from pcf.core.pcf_exceptions import InvalidConfigException
# Particle that adds custom input validation rule on top of EC2Instance particle
class CustomEC2Instance(EC2Instance):
flavor = "custom_ec2_instance"
def __init__(self, particle_definition):
super(CustomEC2Instance, self).__init__(particle_definition)
def _validate_config(self):
"""
Custom logic that validates the particle's configuration
"""
if self.desired_state_definition.get("custom_config").get("tags").get("Tag1") is None:
raise InvalidConfigException
# Edit example json to work in your account
# example ec2 instance json
ec2_instance_example_json = {
"pcf_name": "ec2-example", # Required
"flavor": "ec2_instance", # Required
"aws_resource": {
"custom_config": {
"instance_name": "my-instance", # Required
"userdata_params": {
"ENVIRONMENT_VARIABLES": [
"PROXY=http://proxy.mycompany.com:8080",
"ABC=123"
],
"VAR1": "hello",
"VAR2": "world"
},
"userdata_wait": True,
"userdata_bash": True,
"tags": {
"Name": "pcf-ec2-example",
#"Tag1": "hello", # Required
"Tag2": "world",
}
},
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#EC2.ServiceResource.create_instances for a full list of parameters
"ImageId": "ami-11111111", # Required
"InstanceType": "t2.nano",
"KeyName": "my-key",
"MaxCount": 1,
"MinCount": 1,
"SecurityGroupIds": [
"sg-11111111",
"sg-22222222"
],
"SubnetId": "subnet-11111111", # Required
"IamInstanceProfile": {
"Arn": "arn:aws:iam::111111111111:instance-profile/someRole"
},
"BlockDeviceMappings": [ # Required
{
"DeviceName": "/dev/sda1", # DeviceName changes for different Linux distro
"Ebs": {
"DeleteOnTermination": True,
"VolumeSize": 20,
"VolumeType": "gp2"
}
}
]
}
}
# Setup ec2_instance particle using a sample configuration
ec2_instance_particle = CustomEC2Instance(ec2_instance_example_json)
# example start
ec2_instance_particle.set_desired_state(State.running)
ec2_instance_particle.apply(validate_config=True)
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/override_ec2_start_function/example_ec2_override_start.py | from pcf.particle.aws.ec2.ec2_instance import EC2Instance
from pcf.core import State
from pcf.core.pcf_exceptions import TooManyResourceException, NoResourceException
# Edit example json to work in your account
# example ec2 instance json
ec2_instance_example_json = {
"pcf_name": "ec2-test", # Required
"flavor": "ec2_instance", # Required
"aws_resource": {
"custom_config": {
"instance_name": "ec2-test", # Required
"userdata_template_file": "example_userdata.sh.j2",
"userdata_params": {
"ENVIRONMENT_VARIABLES": [
"PROXY=http://proxy.mycompany.com:8080",
],
# Custom params that match userdata jinja template
"var1": "hello",
"var2": "world"
}
},
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#EC2.ServiceResource.create_instances for a full list of parameters
"ImageId": "ami-11111111", # Required
"InstanceType": "t2.nano", # Required
"KeyName": "my-key",
"MaxCount": 1, # Required
"MinCount": 1, # Required
"SecurityGroupIds": [
"sg-11111111",
"sg-22222222"
],
"SubnetId": "subnet-11111111", # Required
"IamInstanceProfile": {
"Arn": "arn:aws:iam::111111111111:instance-profile/AAAAAAAAAA"
},
"tags": {
"TAG1": "HELLO",
"TAG2": "WORLD"
},
"BlockDeviceMappings": [ # Required
{
"DeviceName": "/dev/sda1", # DeviceName changes for different Linux distro
"Ebs": {
"DeleteOnTermination": True,
"VolumeSize": 20,
"VolumeType": "gp2"
}
}
]
}
}
# Setup ec2_instance particle using a sample configuration
ec2_instance_particle = EC2Instance(ec2_instance_example_json)
# New user defined EC2 instance start function
class NewEC2Start(EC2Instance):
def __init__(self, particle_definition):
super(NewEC2Start, self).__init__(particle_definition)
def __call__(self):
print("I'm new!")
try:
instance_id = self.get_instance_id()
        except TooManyResourceException:
            raise
except NoResourceException:
return self.create()
if self.state == State.stopped:
return self.client.start_instances(InstanceIds=[instance_id])
ec2_instance_particle._start = NewEC2Start(ec2_instance_example_json)
# example start
ec2_instance_particle.set_desired_state(State.running)
ec2_instance_particle.apply()
print(ec2_instance_particle.get_state())
print(ec2_instance_particle.get_current_state_definition())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/override_ec2_start_function/example_userdata.sh.j2 | # Export environment variables
{% for ENVIRONMENT_VARIABLE in ENVIRONMENT_VARIABLES %}
export {{ENVIRONMENT_VARIABLE}}
{% endfor %}
# Run your code here
echo {{var1}}
echo {{var2}}
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/override_ec2_start_function/README.md | # What does this example do?
- This example shows how to override the `_start` function in the ec2_instance particle. The same method can be applied to the other particle lifecycle functions: `_stop`, `_terminate`, `_update` (sketched below)
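- Applying the same pattern to `_terminate`, as a minimal sketch (the class name and print are illustrative, not part of the example; `self.client` and `get_instance_id` are used the same way in the `_start` example):

```python
class NewEC2Terminate(EC2Instance):
    def __call__(self):
        print("custom terminate logic")
        return self.client.terminate_instances(InstanceIds=[self.get_instance_id()])

ec2_instance_particle._terminate = NewEC2Terminate(ec2_instance_example_json)
```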
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_launch_config/example_launch_config.py | from pcf.particle.aws.ec2.autoscaling.launch_configuration import LaunchConfiguration
from pcf.core import State
# Edit example json to work in your account
# example lc json
asg_lc_example_json = {
"pcf_name": "launch-configuration-example", # Required
"flavor": "launch_configuration", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/autoscaling.html#AutoScaling.Client.create_launch_configuration for a full list of parameters
"LaunchConfigurationName": "pcf-launch-config-example", # Required
"InstanceType": "t2.nano", # Required
"KeyName": "my-key",
"IamInstanceProfile": "AAAAAAAAAA",
"ImageId": "ami-11111111" # Required
}
}
# Setup autoscaling group launch configuration particle using a sample configuration
lc_particle = LaunchConfiguration(asg_lc_example_json)
# example start
lc_particle.set_desired_state(State.running)
lc_particle.apply()
print(lc_particle.get_state())
# Launch Configuration has no update
# example terminate
lc_particle.set_desired_state(State.terminated)
lc_particle.apply()
print(lc_particle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_launch_config/README.md | # What does this example do?
- This example creates an EC2 Auto Scaling Group (ASG) Launch Configuration, then terminates it. Note that a launch configuration cannot be updated after creation
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_asg/example_asg.py | from pcf.particle.aws.ec2.autoscaling.auto_scaling_group import AutoScalingGroup
from pcf.core import State
# Edit example json to work in your account
# example asg json
asg_instance_example_json = {
"pcf_name": "asg-test", # Required
"flavor": "auto_scaling_group", # Required
"aws_resource": {
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/autoscaling.html#AutoScaling.Client.create_auto_scaling_group for a full list of parameters
"AutoScalingGroupName": "asg-test", # Required
"LaunchConfigurationName": "AAAAAAAA", # Required
"MinSize": 1, # Required
"MaxSize": 3, # Required
"VPCZoneIdentifier": "subnet-1111111" # Required
}
}
# Setup asg_instance particle using a sample configuration
asg_particle = AutoScalingGroup(asg_instance_example_json)
# example start
asg_particle.set_desired_state(State.running)
asg_particle.apply()
print(asg_particle.get_state())
# example update
updated_def = asg_instance_example_json
updated_def['aws_resource']['MaxSize'] = 2
asg_particle = AutoScalingGroup(updated_def)
asg_particle.set_desired_state(State.running)
asg_particle.apply()
print(asg_particle.get_state())
print(asg_particle.get_current_state_definition())
# example terminate
asg_particle.set_desired_state(State.terminated)
asg_particle.apply()
print(asg_particle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_asg/README.md | # What does this example do?
- This example creates an EC2 Auto Scaling Group (ASG), updates the max size of the ASG (see the sketch below), and terminates the ASG
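- The update is the standard mutate-and-re-apply pattern, taken from `example_asg.py`:

```python
updated_def = asg_instance_example_json
updated_def['aws_resource']['MaxSize'] = 2
asg_particle = AutoScalingGroup(updated_def)
asg_particle.set_desired_state(State.running)
asg_particle.apply()
```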
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_ec2_instance/example_userdata.sh.j2 | # Export environment variables
{% for ENVIRONMENT_VARIABLE in ENVIRONMENT_VARIABLES %}
export {{ENVIRONMENT_VARIABLE}}
{% endfor %}
# Run your code here
echo {{VAR1}}
echo {{VAR2}}
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_ec2_instance/README.md | # What does this example do?
- This example spins up an EC2 instance, runs userdata, and spins down the EC2 instance
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_ec2_instance/example_ec2_instance.py | from pcf.particle.aws.ec2.ec2_instance import EC2Instance
from pcf.core import State
# Edit example json to work in your account
# example ec2 instance json
ec2_instance_example_json = {
"pcf_name": "ec2-example", # Required
"flavor": "ec2_instance", # Required
"aws_resource": {
"custom_config": {
"instance_name": "my-instance", # Required
"userdata_params": {
"ENVIRONMENT_VARIABLES": [
"PROXY=http://proxy.mycompany.com:8080",
"ABC=123"
],
"VAR1": "hello",
"VAR2": "world"
},
"userdata_wait": False, # This feature relies on IamInstanceProfile role having EC2 write access
"userdata_bash": True,
"tags": {
"Name": "pcf-ec2-example",
"Tag1": "hello",
"Tag2": "world",
}
},
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#EC2.ServiceResource.create_instances for a full list of parameters
"ImageId": "ami-11111111", # Required
"InstanceType": "t2.nano",
"KeyName": "my-key",
"MaxCount": 1,
"MinCount": 1,
"SecurityGroupIds": [
"sg-11111111",
"sg-22222222"
],
"SubnetId": "subnet-11111111", # Required
"IamInstanceProfile": {
"Arn": "arn:aws:iam::111111111111:instance-profile/someRole"
},
"BlockDeviceMappings": [ # Required
{
"DeviceName": "/dev/sda1", # DeviceName changes for different Linux distro
"Ebs": {
"DeleteOnTermination": True,
"VolumeSize": 20,
"VolumeType": "gp2"
}
}
]
}
}
# Setup ec2_instance particle using a sample configuration
ec2_instance_particle = EC2Instance(ec2_instance_example_json)
# example start
ec2_instance_particle.set_desired_state(State.running)
ec2_instance_particle.apply()
print(ec2_instance_particle.get_state())
print(ec2_instance_particle.get_current_state_definition())
# example terminate
ec2_instance_particle.set_desired_state(State.terminated)
ec2_instance_particle.apply()
print(ec2_instance_particle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/params_to_ec2_userdata/example_ec2_run_userdata.py | from pcf.particle.aws.ec2.ec2_instance import EC2Instance
from pcf.core import State
from multiprocessing import Pool
def fn(param):
# example ec2 instance json
ec2_instance_example_json = {
"pcf_name": "ec2-test", # Required
"flavor": "ec2_instance", # Required
"aws_resource": {
"custom_config": {
"instance_name": "pcf-test-{}".format(param["idx"]), # Required
"userdata_template_file": "particle/aws/ec2/ec2_run_userdata/example_userdata.sh.j2",
"userdata_params": {
"ENVIRONMENT_VARIABLES": [
"PROXY=http://proxy.mycompany.com:8080",
"HTTP_PROXY=$PROXY",
"HTTPS_PROXY=$PROXY",
"http_proxy=$PROXY",
"https_proxy=$PROXY",
"NO_PROXY=169.254.169.254,.mycompany.com,127.0.0.1,localhost,$(curl -s http://169.254.169.254/latest/meta-data/local-ipv4/)",
"no_proxy=$NO_PROXY"
],
# Custom params that match userdata jinja template
"var1": param["var1"],
"var2": param["var2"]
},
"tags": {
"Name": "pcf-test",
"Project": "PCF"
},
"userdata_wait": True, # This feature relies on IamInstanceProfile role having EC2 write access
},
# Refer to https://boto3.readthedocs.io/en/latest/reference/services/ec2.html#EC2.ServiceResource.create_instances for a full list of parameters
"ImageId": "ami-11111111", # Required
"InstanceType": "t2.nano", # Required
"KeyName": "my-key",
"MaxCount": 1,
"MinCount": 1,
"SecurityGroupIds": [
"sg-11111111",
"sg-22222222"
],
"SubnetId": "subnet-11111111", # Required
"IamInstanceProfile": {
"Arn": "arn:aws:iam::111111111111:instance-profile/AAAAAAAAAA"
},
"BlockDeviceMappings": [ # Required
{
# "DeviceName": "/dev/sda1", # For any Debian based Liunx distro (ex: Debian, Ubuntu, RHEL)
"DeviceName": "/dev/xvda", # DeviceName changes for different Linux distro
"Ebs": {
"DeleteOnTermination": True,
"VolumeSize": 100,
"VolumeType": "gp2"
}
}
]
}
}
# Setup ec2_instance particle using sample configuration
ec2_instance_particle = EC2Instance(ec2_instance_example_json)
# Start EC2 instance and run your code in Userdata
instance_name = ec2_instance_example_json['aws_resource']['custom_config']['instance_name']
print("Creating EC2 instance {} and running your code in Userdata".format(instance_name))
ec2_instance_particle.set_desired_state(State.running)
ec2_instance_particle.apply()
print("{}: {}".format(instance_name, ec2_instance_particle.get_state()))
# Terminate EC2 instance after Userdata script is done running
print("Terminating EC2 instance {}".format(instance_name))
ec2_instance_particle.set_desired_state(State.terminated)
ec2_instance_particle.apply()
print("{}: {}".format(instance_name, ec2_instance_particle.get_state()))
# List of parameters for variables in your code
params = [
{
"idx": 0,
"var1": "Hello",
"var2": "World"
},
{
"idx": 1,
"var1": "ABC",
"var2": "123"
},
{
"idx": 2,
"var1": "XYZ",
"var2": "456"
}
]
if __name__ == "__main__":
    pool = Pool(processes=len(params))
    pool.map(fn, params)
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/params_to_ec2_userdata/example_userdata.sh.j2 | # Export environment variables
{% for ENVIRONMENT_VARIABLE in ENVIRONMENT_VARIABLES %}
export {{ENVIRONMENT_VARIABLE}}
{% endfor %}
# Run your code here
echo {{var1}}
echo {{var2}}
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/params_to_ec2_userdata/README.md | # What does this example do?
- This example uses multiprocessing to create a list of EC2 instances, provides different input parameters to each userdata (aka init script) template, and waits for userdata to finish running before terminating the instances; the fan-out is sketched below
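- The fan-out is a plain `multiprocessing.Pool` over the parameter list; each worker call to `fn` creates, waits on, and terminates one instance, as in `example_ec2_run_userdata.py`:

```python
from multiprocessing import Pool

if __name__ == "__main__":
    pool = Pool(processes=len(params))
    pool.map(fn, params)  # one EC2 instance per parameter dict
```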
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_alb/example_alb.py | from pcf.particle.aws.ec2.alb.alb import ApplicationLoadBalancing
from pcf.core import State
# Edit example json to work in your account
# example alb json
alb_example_json = {
"pcf_name": "alb-example", # Required
"flavor": "alb", # Required
# Refer to https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.create_load_balancer for a full list of parameters
"aws_resource": {
"Name": "alb-example", # Required
"Subnets":
['subnet-11111111', 'subnet-22222222'], # Required or [SubnetMappings]
"SecurityGroups": ['sg-11111111'],
"Scheme": "internal",
"Tags": [
{'Key': 'Name', 'Value': 'pcf-alb-example'},
            {'Key': 'Tag1', 'Value': 'hello'},
            {'Key': 'Tag2', 'Value': 'world'},
],
"Type": "application",
"IpAddressType": "ipv4"
}
}
# Setup alb particle using a sample configuration
alb_particle = ApplicationLoadBalancing(alb_example_json)
# example start
alb_particle.set_desired_state(State.running)
alb_particle.apply(sync=False)
print(alb_particle.get_state())
print(alb_particle.desired_state_definition)
# example update
updated_def = alb_example_json
updated_def['aws_resource']['Tags'][1]['Value'] = 'bye'
updated_particle = ApplicationLoadBalancing(updated_def)
updated_particle.set_desired_state(State.running)
updated_particle.apply(sync=False)
print(updated_particle.get_state())
print(updated_particle.get_current_state_definition())
# example terminate
updated_particle.set_desired_state(State.terminated)
updated_particle.apply()
print(updated_particle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/ec2/use_alb/README.md | # What does this example do?
- This example spins up an Application Load Balancer, updates the ALB, then terminates the ALB
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/sqs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/sqs/use_SQS/example_SQS.py | from pcf.core import State
from pcf.particle.aws.sqs.sqs_queue import SQSQueue
# example SQS particle
particle_definition = {
"pcf_name": "gg-pcf",
"flavor": "sqs_queue",
"aws_resource": {
"QueueName": "test_SQS_queue.fifo", # Required
# "OwnerAwsId": "owner", # only if the queue belongs to a different user
"Attributes": {
# https://boto3.readthedocs.io/en/latest/reference/services/sqs.html#SQS.Client.create_queue
# for all the validation criteria from boto3
"DelaySeconds": "0",
"MaximumMessageSize": "262144",
"MessageRetentionPeriod": "345600",
"Policy": "AWS policy",
"ReceiveMessageWaitTimeSeconds": "20",
# "RedrivePolicy": "{}",
"VisibilityTimeout": "43200",
"KmsMasterKeyId": "enc/sqs",
"KmsDataKeyReusePeriodSeconds": "300",
"FifoQueue": "true",
"ContentBasedDeduplication": "true",
# "ApproximateNumberOfMessages": "1",
# "ApproximateNumberOfMessagesDelayed": "0",
# "ApproximateNumberOfMessagesNotVisible": "0",
# "CreatedTimestamp": "1534276486.369445",
# "LastModifiedTimestamp": "1534276486.369445",
"QueueArn": "arn:aws:sqs:us-east-1:123456789012:test_SQS_queue.fifo"
},
"Tags": {
"test_tag": "value",
"remove_tag": "bye"
}
}
}
# create sqs particle using json
sqs_particle = SQSQueue(particle_definition)
# example start
sqs_particle.set_desired_state(State.running)
sqs_particle.apply()
print(sqs_particle.get_state())
print(sqs_particle.get_current_definition())
print(sqs_particle.get_current_state_definition())
# example update
updated_def = particle_definition
updated_def["aws_resource"]["Attributes"]["MaximumMessageSize"] = "262143" # reset existing
updated_def["aws_resource"]["Attributes"]["ContentBasedDeduplication"] = "false" # add new
updated_def["aws_resource"]["Tags"]["new_tag"] = "new" # new tag
updated_def["aws_resource"]["Tags"]["test_tag"] = "changed" # reset tag
updated_def["aws_resource"]["Tags"].pop("remove_tag") # remove tag
sqs_particle = SQSQueue(updated_def)
sqs_particle.set_desired_state(State.running)
sqs_particle.apply()
print(sqs_particle.get_state())
print(sqs_particle.get_current_definition())
print(sqs_particle.get_current_state_definition())
# example terminate
sqs_particle.set_desired_state(State.terminated)
sqs_particle.apply()
print(sqs_particle.get_state())
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/sqs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/sqs/use_SQS/README.md | # What does this example do?
- This example creates an SQS queue, updates it in different ways (sketched below), then terminates it
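- Updates reuse the same definition; a sketch of the three update styles from `example_SQS.py`:

```python
updated_def["aws_resource"]["Attributes"]["MaximumMessageSize"] = "262143"  # change an attribute
updated_def["aws_resource"]["Tags"]["new_tag"] = "new"                      # add a tag
updated_def["aws_resource"]["Tags"].pop("remove_tag")                       # remove a tag
sqs_particle = SQSQueue(updated_def)
sqs_particle.set_desired_state(State.running)
sqs_particle.apply()
```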
|
0 | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/efs | capitalone_repos/Particle-Cloud-Framework/examples/particle/aws/efs/use_efs_instance/example_efs_instance.py | from pcf.particle.aws.efs.efs_instance import EFSInstance
from pcf.core import State
# example efs instance json
efs_instance_example_json = {
"pcf_name": "pcf_efs", # Required
"flavor": "efs_instance", # Required
"aws_resource": {
"custom_config": {
"instance_name": "efs-instance", # Required
},
"CreationToken": "pcfFileSystem", # Required
"PerformanceMode": "generalPurpose"
}
}
efs_instance_particle = EFSInstance(efs_instance_example_json)
# example start
efs_instance_particle.set_desired_state(State.running)
efs_instance_particle.apply()
print(efs_instance_particle.get_state())
print(efs_instance_particle.get_current_state_definition())
# example tags
tags = [
{
'Key': 'key1',
'Value': 'value1'
},
{
'Key': 'key2',
'Value': 'value2'
}
]
efs_instance_particle.create_tags(tags)
print(efs_instance_particle.describe_tags())
# example delete tag
key_value = [
'key1',
]
efs_instance_particle.delete_tags(key_value)
# example terminate
efs_instance_particle.set_desired_state(State.terminated)
efs_instance_particle.apply()
print(efs_instance_particle.get_state())
|