import itertools
import json
from django import forms
from django.core.exceptions import ValidationError
from django.core.validators import validate_slug
from django.db import models
from django.utils.text import capfirst
from django.utils.translation import ugettext_lazy as _
from philo.forms.fields import JSONFormField
from philo.utils.registry import RegistryIterator
from philo.validators import TemplateValidator, json_validator
class TemplateField(models.TextField):
"""A :class:`TextField` which is validated with a :class:`.TemplateValidator`. ``allow``, ``disallow``, and ``secure`` will be passed into the validator's construction."""
def __init__(self, allow=None, disallow=None, secure=True, *args, **kwargs):
super(TemplateField, self).__init__(*args, **kwargs)
self.validators.append(TemplateValidator(allow, disallow, secure))
class JSONDescriptor(object):
def __init__(self, field):
self.field = field
def __get__(self, instance, owner):
if instance is None:
            raise AttributeError("%s can only be accessed via %s instances." % (self.field.name, owner.__name__))
if self.field.name not in instance.__dict__:
json_string = getattr(instance, self.field.attname)
instance.__dict__[self.field.name] = json.loads(json_string)
return instance.__dict__[self.field.name]
def __set__(self, instance, value):
instance.__dict__[self.field.name] = value
setattr(instance, self.field.attname, json.dumps(value))
def __delete__(self, instance):
        del instance.__dict__[self.field.name]
setattr(instance, self.field.attname, json.dumps(None))
class JSONField(models.TextField):
"""A :class:`TextField` which stores its value on the model instance as a python object and stores its value in the database as JSON. Validated with :func:`.json_validator`."""
default_validators = [json_validator]
def get_attname(self):
return "%s_json" % self.name
def contribute_to_class(self, cls, name):
super(JSONField, self).contribute_to_class(cls, name)
setattr(cls, name, JSONDescriptor(self))
models.signals.pre_init.connect(self.fix_init_kwarg, sender=cls)
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs):
# Anything passed in as self.name is assumed to come from a serializer and
# will be treated as a json string.
if self.name in kwargs:
value = kwargs.pop(self.name)
# Hack to handle the xml serializer's handling of "null"
if value is None:
value = 'null'
kwargs[self.attname] = value
def formfield(self, *args, **kwargs):
kwargs["form_class"] = JSONFormField
return super(JSONField, self).formfield(*args, **kwargs)
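# A minimal usage sketch (hypothetical model; not part of philo itself):
#
#     class Widget(models.Model):
#         settings = JSONField(blank=True)
#
#     w = Widget(settings={'columns': 2})
#     w.settings          # -> {'columns': 2} (python object, via JSONDescriptor)
#     w.settings_json     # -> '{"columns": 2}' (what get_attname() stores in the DB)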
class SlugMultipleChoiceField(models.Field):
"""Stores a selection of multiple items with unique slugs in the form of a comma-separated list. Also knows how to correctly handle :class:`RegistryIterator`\ s passed in as choices."""
__metaclass__ = models.SubfieldBase
description = _("Comma-separated slug field")
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return []
if isinstance(value, list):
return value
return value.split(',')
def get_prep_value(self, value):
return ','.join(value)
def formfield(self, **kwargs):
# This is necessary because django hard-codes TypedChoiceField for things with choices.
defaults = {
'widget': forms.CheckboxSelectMultiple,
'choices': self.get_choices(include_blank=False),
'label': capfirst(self.verbose_name),
'required': not self.blank,
'help_text': self.help_text
}
if self.has_default():
if callable(self.default):
defaults['initial'] = self.default
defaults['show_hidden_initial'] = True
else:
defaults['initial'] = self.get_default()
        for k in list(kwargs.keys()):
if k not in ('coerce', 'empty_value', 'choices', 'required',
'widget', 'label', 'initial', 'help_text',
'error_messages', 'show_hidden_initial'):
del kwargs[k]
defaults.update(kwargs)
form_class = forms.TypedMultipleChoiceField
return form_class(**defaults)
def validate(self, value, model_instance):
invalid_values = []
for val in value:
try:
validate_slug(val)
except ValidationError:
invalid_values.append(val)
if invalid_values:
# should really make a custom message.
raise ValidationError(self.error_messages['invalid_choice'] % invalid_values)
def _get_choices(self):
if isinstance(self._choices, RegistryIterator):
return self._choices.copy()
elif hasattr(self._choices, 'next'):
choices, self._choices = itertools.tee(self._choices)
return choices
else:
return self._choices
choices = property(_get_choices)
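# Round-trip sketch: a selection such as ['foo', 'bar'] is stored by
# get_prep_value() as the string 'foo,bar' and split back into a list
# by to_python().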
try:
from south.modelsinspector import add_introspection_rules
except ImportError:
pass
else:
add_introspection_rules([], ["^philo\.models\.fields\.SlugMultipleChoiceField"])
add_introspection_rules([], ["^philo\.models\.fields\.TemplateField"])
add_introspection_rules([], ["^philo\.models\.fields\.JSONField"])
|
import hashlib
import json
import logging
import os
import subprocess
import sys
import time
from collections import defaultdict
from shutil import copy
from shutil import copyfile
from shutil import copystat
from shutil import copytree
from tempfile import mkdtemp
import boto3
import botocore
import yaml
from .helpers import archive
from .helpers import get_environment_variable_value
from .helpers import LambdaContext
from .helpers import mkdir
from .helpers import read
from .helpers import timestamp
ARN_PREFIXES = {
"cn-north-1": "aws-cn",
"cn-northwest-1": "aws-cn",
"us-gov-west-1": "aws-us-gov",
}
log = logging.getLogger(__name__)
def load_source(module_name, module_path):
"""Loads a python module from the path of the corresponding file."""
if sys.version_info[0] == 3 and sys.version_info[1] >= 5:
import importlib.util
spec = importlib.util.spec_from_file_location(module_name, module_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
elif sys.version_info[0] == 3 and sys.version_info[1] < 5:
import importlib.machinery
loader = importlib.machinery.SourceFileLoader(module_name, module_path)
module = loader.load_module()
return module
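# Example (hypothetical paths): load a handler module, then look up the
# handler function on it.
#
#     module = load_source("service", "/path/to/project/service.py")
#     handler = getattr(module, "handler")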
def cleanup_old_versions(
src, keep_last_versions, config_file="config.yaml", profile_name=None,
):
"""Deletes old deployed versions of the function in AWS Lambda.
Won't delete $Latest and any aliased version
:param str src:
The path to your Lambda ready project (folder must contain a valid
config.yaml and handler module (e.g.: service.py).
:param int keep_last_versions:
The number of recent versions to keep and not delete
"""
if keep_last_versions <= 0:
print("Won't delete all versions. Please do this manually")
else:
path_to_config_file = os.path.join(src, config_file)
cfg = read_cfg(path_to_config_file, profile_name)
profile_name = cfg.get("profile")
aws_access_key_id = cfg.get("aws_access_key_id")
aws_secret_access_key = cfg.get("aws_secret_access_key")
client = get_client(
"lambda",
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region"),
)
response = client.list_versions_by_function(
FunctionName=cfg.get("function_name"),
)
versions = response.get("Versions")
        if len(versions) < keep_last_versions:
print("Nothing to delete. (Too few versions published)")
else:
version_numbers = [
elem.get("Version") for elem in versions[1:-keep_last_versions]
]
for version_number in version_numbers:
try:
client.delete_function(
FunctionName=cfg.get("function_name"),
Qualifier=version_number,
)
except botocore.exceptions.ClientError as e:
print(f"Skipping Version {version_number}: {e}")
def deploy(
src,
requirements=None,
local_package=None,
config_file="config.yaml",
profile_name=None,
preserve_vpc=False,
):
"""Deploys a new function to AWS Lambda.
:param str src:
The path to your Lambda ready project (folder must contain a valid
config.yaml and handler module (e.g.: service.py).
:param str local_package:
The path to a local package with should be included in the deploy as
well (and/or is not available on PyPi)
"""
# Load and parse the config file.
path_to_config_file = os.path.join(src, config_file)
cfg = read_cfg(path_to_config_file, profile_name)
# Copy all the pip dependencies required to run your code into a temporary
# folder then add the handler file in the root of this directory.
# Zip the contents of this folder into a single file and output to the dist
# directory.
path_to_zip_file = build(
src,
config_file=config_file,
requirements=requirements,
local_package=local_package,
)
existing_config = get_function_config(cfg)
if existing_config:
update_function(
cfg, path_to_zip_file, existing_config, preserve_vpc=preserve_vpc
)
else:
create_function(cfg, path_to_zip_file)
def deploy_s3(
src,
requirements=None,
local_package=None,
config_file="config.yaml",
profile_name=None,
preserve_vpc=False,
):
"""Deploys a new function via AWS S3.
:param str src:
The path to your Lambda ready project (folder must contain a valid
config.yaml and handler module (e.g.: service.py).
:param str local_package:
The path to a local package with should be included in the deploy as
well (and/or is not available on PyPi)
"""
# Load and parse the config file.
path_to_config_file = os.path.join(src, config_file)
cfg = read_cfg(path_to_config_file, profile_name)
# Copy all the pip dependencies required to run your code into a temporary
# folder then add the handler file in the root of this directory.
# Zip the contents of this folder into a single file and output to the dist
# directory.
path_to_zip_file = build(
src,
config_file=config_file,
requirements=requirements,
local_package=local_package,
)
use_s3 = True
s3_file = upload_s3(cfg, path_to_zip_file, use_s3)
existing_config = get_function_config(cfg)
if existing_config:
update_function(
cfg,
path_to_zip_file,
existing_config,
use_s3=use_s3,
s3_file=s3_file,
preserve_vpc=preserve_vpc,
)
else:
create_function(cfg, path_to_zip_file, use_s3=use_s3, s3_file=s3_file)
def upload(
src,
requirements=None,
local_package=None,
config_file="config.yaml",
profile_name=None,
):
"""Uploads a new function to AWS S3.
:param str src:
The path to your Lambda ready project (folder must contain a valid
config.yaml and handler module (e.g.: service.py).
:param str local_package:
The path to a local package with should be included in the deploy as
well (and/or is not available on PyPi)
"""
# Load and parse the config file.
path_to_config_file = os.path.join(src, config_file)
cfg = read_cfg(path_to_config_file, profile_name)
# Copy all the pip dependencies required to run your code into a temporary
# folder then add the handler file in the root of this directory.
# Zip the contents of this folder into a single file and output to the dist
# directory.
path_to_zip_file = build(
src,
config_file=config_file,
requirements=requirements,
local_package=local_package,
)
upload_s3(cfg, path_to_zip_file)
def invoke(
src,
event_file="event.json",
config_file="config.yaml",
profile_name=None,
verbose=False,
):
"""Simulates a call to your function.
:param str src:
The path to your Lambda ready project (folder must contain a valid
config.yaml and handler module (e.g.: service.py).
:param str alt_event:
An optional argument to override which event file to use.
:param bool verbose:
Whether to print out verbose details.
"""
# Load and parse the config file.
path_to_config_file = os.path.join(src, config_file)
cfg = read_cfg(path_to_config_file, profile_name)
# Set AWS_PROFILE environment variable based on `--profile` option.
if profile_name:
os.environ["AWS_PROFILE"] = profile_name
# Load environment variables from the config file into the actual
# environment.
env_vars = cfg.get("environment_variables")
if env_vars:
for key, value in env_vars.items():
os.environ[key] = get_environment_variable_value(value)
# Load and parse event file.
path_to_event_file = os.path.join(src, event_file)
event = read(path_to_event_file, loader=json.loads)
# Tweak to allow module to import local modules
try:
sys.path.index(src)
except ValueError:
sys.path.append(src)
handler = cfg.get("handler")
# Inspect the handler string (<module>.<function name>) and translate it
# into a function we can execute.
fn = get_callable_handler_function(src, handler)
timeout = cfg.get("timeout")
if timeout:
context = LambdaContext(cfg.get("function_name"), timeout)
else:
context = LambdaContext(cfg.get("function_name"))
start = time.time()
results = fn(event, context)
end = time.time()
print("{0}".format(results))
if verbose:
print(
"\nexecution time: {:.8f}s\nfunction execution "
"timeout: {:2}s".format(end - start, cfg.get("timeout", 15))
)
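# Example (hypothetical project layout): simulate the function defined in
# ./service.py against the bundled event.json and print timing details.
#
#     invoke(".", event_file="event.json", verbose=True)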
def init(src, minimal=False):
"""Copies template files to a given directory.
:param str src:
The path to output the template lambda project files.
:param bool minimal:
Minimal possible template files (excludes event.json).
"""
templates_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "project_templates",
)
for filename in os.listdir(templates_path):
if (minimal and filename == "event.json") or filename.endswith(".pyc"):
continue
        template_path = os.path.join(templates_path, filename)
        if not os.path.isdir(template_path):
            copy(template_path, src)
def build(
src,
requirements=None,
local_package=None,
config_file="config.yaml",
profile_name=None,
):
"""Builds the file bundle.
:param str src:
The path to your Lambda ready project (folder must contain a valid
config.yaml and handler module (e.g.: service.py).
:param str local_package:
The path to a local package with should be included in the deploy as
well (and/or is not available on PyPi)
"""
# Load and parse the config file.
path_to_config_file = os.path.join(src, config_file)
cfg = read_cfg(path_to_config_file, profile_name)
# Get the absolute path to the output directory and create it if it doesn't
# already exist.
dist_directory = cfg.get("dist_directory", "dist")
path_to_dist = os.path.join(src, dist_directory)
mkdir(path_to_dist)
# Combine the name of the Lambda function with the current timestamp to use
# for the output filename.
function_name = cfg.get("function_name")
output_filename = "{0}-{1}.zip".format(timestamp(), function_name)
path_to_temp = mkdtemp(prefix="aws-lambda")
pip_install_to_target(
path_to_temp, requirements=requirements, local_package=local_package,
)
# Hack for Zope.
if "zope" in os.listdir(path_to_temp):
print(
"Zope packages detected; fixing Zope package paths to "
"make them importable.",
)
# Touch.
with open(os.path.join(path_to_temp, "zope/__init__.py"), "wb"):
pass
# Gracefully handle whether ".zip" was included in the filename or not.
output_filename = (
"{0}.zip".format(output_filename)
if not output_filename.endswith(".zip")
else output_filename
)
# Allow definition of source code directories we want to build into our
# zipped package.
build_config = defaultdict(**cfg.get("build", {}))
build_source_directories = build_config.get("source_directories", "")
build_source_directories = (
build_source_directories
if build_source_directories is not None
else ""
)
source_directories = [
d.strip() for d in build_source_directories.split(",")
]
files = []
    for filename in os.listdir(src):
        path = os.path.join(src, filename)
        if os.path.isfile(path):
            if filename == ".DS_Store":
                continue
            if filename == config_file:
                continue
            print("Bundling: %r" % filename)
            files.append(path)
        elif os.path.isdir(path) and filename in source_directories:
            print("Bundling directory: %r" % filename)
            files.append(path)
# "cd" into `temp_path` directory.
os.chdir(path_to_temp)
for f in files:
if os.path.isfile(f):
_, filename = os.path.split(f)
# Copy handler file into root of the packages folder.
copyfile(f, os.path.join(path_to_temp, filename))
copystat(f, os.path.join(path_to_temp, filename))
elif os.path.isdir(f):
src_path_length = len(src) + 1
destination_folder = os.path.join(
path_to_temp, f[src_path_length:]
)
copytree(f, destination_folder)
# Zip them together into a single file.
# TODO: Delete temp directory created once the archive has been compiled.
path_to_zip_file = archive("./", path_to_dist, output_filename)
return path_to_zip_file
def get_callable_handler_function(src, handler):
"""Translate a string of the form "module.function" into a callable
function.
:param str src:
The path to your Lambda project containing a valid handler file.
:param str handler:
A dot delimited string representing the `<module>.<function name>`.
"""
# "cd" into `src` directory.
os.chdir(src)
module_name, function_name = handler.split(".")
filename = get_handler_filename(handler)
path_to_module_file = os.path.join(src, filename)
module = load_source(module_name, path_to_module_file)
return getattr(module, function_name)
def get_handler_filename(handler):
"""Shortcut to get the filename from the handler string.
:param str handler:
A dot delimited string representing the `<module>.<function name>`.
"""
module_name, _ = handler.split(".")
return "{0}.py".format(module_name)
def _install_packages(path, packages):
"""Install all packages listed to the target directory.
Ignores any package that includes Python itself and python-lambda as well
since its only needed for deploying and not running the code
:param str path:
Path to copy installed pip packages to.
:param list packages:
A list of packages to be installed via pip.
"""
    def _filter_blacklist(package):
        blacklist = ["-i", "#", "Python==", "python-lambda=="]
        return not any(package.startswith(entry) for entry in blacklist)
filtered_packages = filter(_filter_blacklist, packages)
for package in filtered_packages:
if package.startswith("-e "):
package = package.replace("-e ", "")
print("Installing {package}".format(package=package))
subprocess.check_call(
[
sys.executable,
"-m",
"pip",
"install",
package,
"-t",
path,
"--ignore-installed",
]
)
print(
"Install directory contents are now: {directory}".format(
directory=os.listdir(path)
)
)
def pip_install_to_target(path, requirements=None, local_package=None):
"""For a given active virtualenv, gather all installed pip packages then
copy (re-install) them to the path provided.
:param str path:
Path to copy installed pip packages to.
:param str requirements:
If set, only the packages in the supplied requirements file are
installed.
If not set then installs all packages found via pip freeze.
:param str local_package:
The path to a local package with should be included in the deploy as
well (and/or is not available on PyPi)
"""
packages = []
if not requirements:
print("Gathering pip packages")
pkgStr = subprocess.check_output(
[sys.executable, "-m", "pip", "freeze"]
)
packages.extend(pkgStr.decode("utf-8").splitlines())
else:
if os.path.exists(requirements):
print("Gathering requirement packages")
data = read(requirements)
packages.extend(data.splitlines())
if not packages:
print("No dependency packages installed!")
if local_package is not None:
if not isinstance(local_package, (list, tuple)):
local_package = [local_package]
for l_package in local_package:
packages.append(l_package)
_install_packages(path, packages)
def get_role_name(region, account_id, role):
"""Shortcut to insert the `account_id` and `role` into the iam string."""
prefix = ARN_PREFIXES.get(region, "aws")
return "arn:{0}:iam::{1}:role/{2}".format(prefix, account_id, role)
def get_account_id(
profile_name, aws_access_key_id, aws_secret_access_key, region=None,
):
"""Query STS for a users' account_id"""
client = get_client(
"sts", profile_name, aws_access_key_id, aws_secret_access_key, region,
)
return client.get_caller_identity().get("Account")
def get_client(
client,
profile_name,
aws_access_key_id,
aws_secret_access_key,
region=None,
):
"""Shortcut for getting an initialized instance of the boto3 client."""
boto3.setup_default_session(
profile_name=profile_name,
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=region,
)
return boto3.client(client)
def create_function(cfg, path_to_zip_file, use_s3=False, s3_file=None):
"""Register and upload a function to AWS Lambda."""
print("Creating your new Lambda function")
byte_stream = read(path_to_zip_file, binary_file=True)
profile_name = cfg.get("profile")
aws_access_key_id = cfg.get("aws_access_key_id")
aws_secret_access_key = cfg.get("aws_secret_access_key")
account_id = get_account_id(
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region",),
)
role = get_role_name(
cfg.get("region"),
account_id,
cfg.get("role", "lambda_basic_execution"),
)
client = get_client(
"lambda",
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region"),
)
    # Prefer environment variables over config values.
buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name")
func_name = os.environ.get("LAMBDA_FUNCTION_NAME") or cfg.get(
"function_name"
)
print("Creating lambda function with name: {}".format(func_name))
if use_s3:
kwargs = {
"FunctionName": func_name,
"Runtime": cfg.get("runtime", "python2.7"),
"Role": role,
"Handler": cfg.get("handler"),
"Code": {
"S3Bucket": "{}".format(buck_name),
"S3Key": "{}".format(s3_file),
},
"Description": cfg.get("description", ""),
"Timeout": cfg.get("timeout", 15),
"MemorySize": cfg.get("memory_size", 512),
"VpcConfig": {
"SubnetIds": cfg.get("subnet_ids", []),
"SecurityGroupIds": cfg.get("security_group_ids", []),
},
"Publish": True,
}
else:
kwargs = {
"FunctionName": func_name,
"Runtime": cfg.get("runtime", "python2.7"),
"Role": role,
"Handler": cfg.get("handler"),
"Code": {"ZipFile": byte_stream},
"Description": cfg.get("description", ""),
"Timeout": cfg.get("timeout", 15),
"MemorySize": cfg.get("memory_size", 512),
"VpcConfig": {
"SubnetIds": cfg.get("subnet_ids", []),
"SecurityGroupIds": cfg.get("security_group_ids", []),
},
"Publish": True,
}
if "tags" in cfg:
kwargs.update(
Tags={key: str(value) for key, value in cfg.get("tags").items()}
)
if "environment_variables" in cfg:
kwargs.update(
Environment={
"Variables": {
key: get_environment_variable_value(value)
for key, value in cfg.get("environment_variables").items()
},
},
)
client.create_function(**kwargs)
concurrency = get_concurrency(cfg)
if concurrency > 0:
client.put_function_concurrency(
FunctionName=func_name, ReservedConcurrentExecutions=concurrency
)
def update_function(
cfg,
path_to_zip_file,
existing_cfg,
use_s3=False,
s3_file=None,
preserve_vpc=False,
):
"""Updates the code of an existing Lambda function"""
print("Updating your Lambda function")
byte_stream = read(path_to_zip_file, binary_file=True)
profile_name = cfg.get("profile")
aws_access_key_id = cfg.get("aws_access_key_id")
aws_secret_access_key = cfg.get("aws_secret_access_key")
account_id = get_account_id(
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region",),
)
role = get_role_name(
cfg.get("region"),
account_id,
cfg.get("role", "lambda_basic_execution"),
)
client = get_client(
"lambda",
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region"),
)
    # Prefer environment variables over config values.
buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name")
if use_s3:
client.update_function_code(
FunctionName=cfg.get("function_name"),
S3Bucket="{}".format(buck_name),
S3Key="{}".format(s3_file),
Publish=True,
)
else:
client.update_function_code(
FunctionName=cfg.get("function_name"),
ZipFile=byte_stream,
Publish=True,
)
kwargs = {
"FunctionName": cfg.get("function_name"),
"Role": role,
"Runtime": cfg.get("runtime"),
"Handler": cfg.get("handler"),
"Description": cfg.get("description", ""),
"Timeout": cfg.get("timeout", 15),
"MemorySize": cfg.get("memory_size", 512),
}
if preserve_vpc:
kwargs["VpcConfig"] = existing_cfg.get("Configuration", {}).get(
"VpcConfig"
)
if kwargs["VpcConfig"] is None:
kwargs["VpcConfig"] = {
"SubnetIds": cfg.get("subnet_ids", []),
"SecurityGroupIds": cfg.get("security_group_ids", []),
}
else:
del kwargs["VpcConfig"]["VpcId"]
else:
kwargs["VpcConfig"] = {
"SubnetIds": cfg.get("subnet_ids", []),
"SecurityGroupIds": cfg.get("security_group_ids", []),
}
if "environment_variables" in cfg:
kwargs.update(
Environment={
"Variables": {
key: str(get_environment_variable_value(value))
for key, value in cfg.get("environment_variables").items()
},
},
)
ret = client.update_function_configuration(**kwargs)
concurrency = get_concurrency(cfg)
if concurrency > 0:
client.put_function_concurrency(
FunctionName=cfg.get("function_name"),
ReservedConcurrentExecutions=concurrency,
)
elif "Concurrency" in existing_cfg:
client.delete_function_concurrency(
FunctionName=cfg.get("function_name")
)
if "tags" in cfg:
tags = {key: str(value) for key, value in cfg.get("tags").items()}
if tags != existing_cfg.get("Tags"):
if existing_cfg.get("Tags"):
client.untag_resource(
Resource=ret["FunctionArn"],
TagKeys=list(existing_cfg["Tags"].keys()),
)
client.tag_resource(Resource=ret["FunctionArn"], Tags=tags)
def upload_s3(cfg, path_to_zip_file, use_s3=False):
"""Upload a function to AWS S3."""
print("Uploading your new Lambda function")
profile_name = cfg.get("profile")
aws_access_key_id = cfg.get("aws_access_key_id")
aws_secret_access_key = cfg.get("aws_secret_access_key")
client = get_client(
"s3",
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region"),
)
byte_stream = b""
with open(path_to_zip_file, mode="rb") as fh:
byte_stream = fh.read()
s3_key_prefix = cfg.get("s3_key_prefix", "/dist")
checksum = hashlib.new("md5", byte_stream).hexdigest()
timestamp = str(time.time())
filename = "{prefix}{checksum}-{ts}.zip".format(
prefix=s3_key_prefix, checksum=checksum, ts=timestamp,
)
    # Prefer environment variables over config values.
buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name")
func_name = os.environ.get("LAMBDA_FUNCTION_NAME") or cfg.get(
"function_name"
)
kwargs = {
"Bucket": "{}".format(buck_name),
"Key": "{}".format(filename),
"Body": byte_stream,
}
client.put_object(**kwargs)
print("Finished uploading {} to S3 bucket {}".format(func_name, buck_name))
if use_s3:
return filename
def get_function_config(cfg):
"""Check whether a function exists or not and return its config"""
function_name = cfg.get("function_name")
profile_name = cfg.get("profile")
aws_access_key_id = cfg.get("aws_access_key_id")
aws_secret_access_key = cfg.get("aws_secret_access_key")
client = get_client(
"lambda",
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region"),
)
try:
return client.get_function(FunctionName=function_name)
except client.exceptions.ResourceNotFoundException as e:
if "Function not found" in str(e):
return False
def get_concurrency(cfg):
"""Return the Reserved Concurrent Executions if present in the config"""
concurrency = int(cfg.get("concurrency", 0))
return max(0, concurrency)
def read_cfg(path_to_config_file, profile_name):
cfg = read(path_to_config_file, loader=yaml.full_load)
if profile_name is not None:
cfg["profile"] = profile_name
elif "AWS_PROFILE" in os.environ:
cfg["profile"] = os.environ["AWS_PROFILE"]
return cfg
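# A minimal config.yaml sketch (values are placeholders) covering the keys
# this module reads via cfg.get():
#
#     region: us-east-1
#     function_name: my_lambda_function
#     handler: service.handler
#     runtime: python3.9
#     role: lambda_basic_execution
#     timeout: 15
#     memory_size: 512
#     # bucket_name: my-bucket          # only needed for the S3 code paths
#     # environment_variables:
#     #     key: value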
|
'''Contains the EpubBuilder class to build epub2.0.1 files with the getebook
module.'''
import html
import re
import datetime
import getebook
import os.path
import zipfile
__all__ = ['EpubBuilder', 'EpubTOC', 'Author']
def _normalize(name):
'''Transform "Firstname [Middlenames] Lastname" into
"Lastname, Firstname [Middlenames]".'''
split = name.split()
if len(split) == 1:
return name
    return split[-1] + ', ' + ' '.join(split[0:-1])
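# For example, _normalize('John Ronald Reuel Tolkien') returns
# 'Tolkien, John Ronald Reuel'.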
def _make_starttag(tag, attrs):
'Write a starttag.'
out = '<' + tag
for key in attrs:
out += ' {}="{}"'.format(key, html.escape(attrs[key]))
out += '>'
return out
def _make_xml_elem(tag, text, attr=()):
'Write a flat xml element.'
out = ' <' + tag
for (key, val) in attr:
out += ' {}="{}"'.format(key, val)
if text:
out += '>{}</{}>\n'.format(text, tag)
else:
out += ' />\n'
return out
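# For example, _make_xml_elem('dc:title', 'My Book') yields an indented
# '<dc:title>My Book</dc:title>' line, while empty text yields a
# self-closing '<dc:title />' line.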
class EpubTOC(getebook.TOC):
'Table of contents.'
_head = ((
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<ncx xmlns="http://www.daisy.org/z3986/2005/ncx/" version="2005-1" xml:lang="en-US">\n'
' <head>\n'
' <meta name="dtb:uid" content="{}" />\n'
' <meta name="dtb:depth" content="{}" />\n'
' <meta name="dtb:totalPageCount" content="0" />\n'
' <meta name="dtb:maxPageNumber" content="0" />\n'
' </head>\n'
' <docTitle>\n'
' <text>{}</text>\n'
' </docTitle>\n'
))
_doc_author = ((
' <docAuthor>\n'
' <text>{}</text>\n'
' </docAuthor>\n'
))
_navp = ((
'{0}<navPoint id="nav{1}">\n'
'{0} <navLabel>\n'
'{0} <text>{2}</text>\n'
'{0} </navLabel>\n'
'{0} <content src="{3}" />\n'
))
def _navp_xml(self, entry, indent_lvl):
'Write xml for an entry and all its subentries.'
xml = self._navp.format(' '*indent_lvl, str(entry.no), entry.text,
entry.target)
for sub in entry.entries:
xml += self._navp_xml(sub, indent_lvl+1)
xml += ' '*indent_lvl + '</navPoint>\n'
return xml
def write_xml(self, uid, title, authors):
'Write the xml code for the table of contents.'
xml = self._head.format(uid, self.max_depth, title)
for aut in authors:
xml += self._doc_author.format(aut)
xml += ' <navMap>\n'
for entry in self.entries:
xml += self._navp_xml(entry, 2)
xml += ' </navMap>\n</ncx>'
return xml
class _Fileinfo:
'Information about a component file of an epub.'
def __init__(self, name, in_spine = True, guide_title = None,
guide_type = None):
'''Initialize the object. If the file does not belong in the
reading order, in_spine should be set to False. If it should
appear in the guide, set guide_title and guide_type.'''
self.name = name
        (self.ident, ext) = os.path.splitext(name)
self.in_spine = in_spine
self.guide_title = guide_title
self.guide_type = guide_type
# Infer media-type from file extension
ext = ext.lower()
if ext in ('.htm', '.html', '.xhtml'):
self.media_type = 'application/xhtml+xml'
        elif ext in ('.png', '.gif', '.jpeg'):
            self.media_type = 'image/' + ext[1:]
        elif ext == '.jpg':
            self.media_type = 'image/jpeg'
elif ext == '.css':
self.media_type = 'text/css'
elif ext == '.ncx':
self.media_type = 'application/x-dtbncx+xml'
else:
raise ValueError('Can\'t infer media-type from extension: %s' % ext)
def manifest_entry(self):
'Write the XML element for the manifest.'
return _make_xml_elem('item', '',
[
('href', self.name),
('id', self.ident),
('media-type', self.media_type)
])
def spine_entry(self):
'''Write the XML element for the spine.
(Empty string if in_spine is False.)'''
if self.in_spine:
return _make_xml_elem('itemref', '', [('idref', self.ident)])
else:
return ''
def guide_entry(self):
'''Write the XML element for the guide.
(Empty string if no guide title and type are given.)'''
if self.guide_title and self.guide_type:
return _make_xml_elem('reference', '',
[
('title', self.guide_title),
('type', self.guide_type),
('href', self.name)
])
else:
return ''
class _EpubMeta:
'Metadata entry for an epub file.'
def __init__(self, tag, text, *args):
'''The metadata entry is an XML element. *args is used for
supplying the XML element's attributes as (key, value) pairs.'''
self.tag = tag
self.text = text
self.attr = args
def write_xml(self):
'Write the XML element.'
return _make_xml_elem(self.tag, self.text, self.attr)
def __repr__(self):
'Returns the text.'
return self.text
def __str__(self):
'Returns the text.'
return self.text
class _EpubDate(_EpubMeta):
'Metadata element for the publication date.'
_date_re = re.compile('^([0-9]{4})(-[0-9]{2}(-[0-9]{2})?)?$')
def __init__(self, date):
'''date must be a string of the form "YYYY[-MM[-DD]]". If it is
not of this form, or if the date is invalid, ValueError is
raised.'''
m = self._date_re.match(date)
if not m:
raise ValueError('invalid date format')
year = int(m.group(1))
        try:
            mon = int(m.group(2)[1:])
            if mon < 1 or mon > 12:
                raise ValueError('month must be in 1..12')
        except TypeError:
            # No month given; the regex guarantees there is no day either.
            pass
        try:
            day = int(m.group(3)[1:])
            datetime.date(year, mon, day) # raises ValueError if invalid
        except TypeError:
            pass
self.tag = 'dc:date'
self.text = date
self.attr = ()
class _EpubLang(_EpubMeta):
'Metadata element for the language of the book.'
_lang_re = re.compile('^[a-z]{2}(-[A-Z]{2})?$')
def __init__(self, lang):
'''lang must be a lower-case two-letter language code,
optionally followed by a "-" and a upper-case two-letter country
code. (e.g., "en", "en-US", "en-UK", "de", "de-DE", "de-AT")'''
if self._lang_re.match(lang):
self.tag = 'dc:language'
self.text = lang
self.attr = ()
else:
raise ValueError('invalid language format')
class Author(_EpubMeta):
'''To control the file-as and role attribute for the authors, pass
an Author object to the EpubBuilder instead of a string. The file-as
attribute is a form of the name used for sorting. The role attribute
describes how the person was involved in the work.
You ONLY need this if an author's name is not of the form
"Given-name Family-name", or if you want to specify a role other
than author. Otherwise, you can just pass a string.
The value of role should be a MARC relator, e.g., "aut" for author
or "edt" for editor. See http://www.loc.gov/marc/relators/ for a
full list.'''
def __init__(self, name, fileas = None, role = 'aut'):
'''Initialize the object. If the argument "fileas" is not given,
"Last-name, First-name" is used for the file-as attribute. If
the argument "role" is not given, "aut" is used for the role
attribute.'''
if not fileas:
fileas = _normalize(name)
self.tag = 'dc:creator'
self.text = name
self.attr = (('opf:file-as', fileas), ('opf:role', role))
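# For example, Author('John Ronald Reuel Tolkien') files as
# 'Tolkien, John Ronald Reuel' with role "aut", while
# Author('Jane Doe', role='edt') marks the person as an editor.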
class _OPFfile:
'''Class for writing the OPF (Open Packaging Format) file for an
epub file. The OPF file contains the metadata, a manifest of all
component files in the epub, a "spine" which specifies the reading
order and a guide which points to important components of the book
such as the title page.'''
_opf = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<package version="2.0" xmlns="http://www.idpf.org/2007/opf" unique_identifier="uid_id">\n'
' <metadata xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:opf="http://www.idpf.org/2007/opf">\n'
'{}'
' </metadata>\n'
' <manifest>\n'
'{}'
' </manifest>\n'
' <spine toc="toc">\n'
'{}'
' </spine>\n'
' <guide>\n'
'{}'
' </guide>\n'
'</package>\n'
)
def __init__(self):
'Initialize.'
self.meta = []
self.filelist = []
def write_xml(self):
'Write the XML code for the OPF file.'
metadata = ''
for elem in self.meta:
metadata += elem.write_xml()
manif = ''
spine = ''
guide = ''
for finfo in self.filelist:
manif += finfo.manifest_entry()
spine += finfo.spine_entry()
guide += finfo.guide_entry()
return self._opf.format(metadata, manif, spine, guide)
class EpubBuilder:
'''Builds an epub2.0.1 file. Some of the attributes of this class
(title, uid, lang) are marked as "mandatory" because they represent
metadata that is required by the epub specification. If these
attributes are left unset, default values will be used.'''
_style_css = (
'h1, h2, h3, h4, h5, h6 {\n'
' text-align: center;\n'
'}\n'
'p {\n'
' text-align: justify;\n'
' margin-top: 0.125em;\n'
' margin-bottom: 0em;\n'
' text-indent: 1.0em;\n'
'}\n'
'.getebook-tp {\n'
' margin-top: 8em;\n'
'}\n'
'.getebook-tp-authors {\n'
' font-size: 2em;\n'
' text-align: center;\n'
' margin-bottom: 1em;\n'
'}\n'
'.getebook-tp-title {\n'
' font-weight: bold;\n'
' font-size: 3em;\n'
' text-align: center;\n'
'}\n'
'.getebook-tp-sub {\n'
' text-align: center;\n'
' font-weight: normal;\n'
' font-size: 0.8em;\n'
' margin-top: 1em;\n'
'}\n'
'.getebook-false-h {\n'
' font-weight: bold;\n'
' font-size: 1.5em;\n'
'}\n'
'.getebook-small-h {\n'
' font-style: normal;\n'
' font-weight: normal;\n'
' font-size: 0.8em;\n'
'}\n'
)
_container_xml = (
'<?xml version="1.0"?>\n'
'<container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">\n'
' <rootfiles>\n'
' <rootfile full-path="package.opf" media-type="application/oebps-package+xml"/>\n'
' </rootfiles>\n'
'</container>\n'
)
_html = (
'<?xml version="1.0" encoding="utf-8"?>\n'
'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n'
'<html xmlns="http://www.w3.org/1999/xhtml">\n'
' <head>\n'
' <title>{}</title>\n'
        ' <meta http-equiv="content-type" content="application/xhtml+xml; charset=utf-8" />\n'
' <link href="style.css" rel="stylesheet" type="text/css" />\n'
' </head>\n'
' <body>\n{}'
' </body>\n'
'</html>\n'
)
_finalized = False
def __init__(self, epub_file):
'''Initialize the EpubBuilder instance. "epub_file" is the
filename of the epub to be created.'''
self.epub_f = zipfile.ZipFile(epub_file, 'w', zipfile.ZIP_DEFLATED)
self.epub_f.writestr('mimetype', 'application/epub+zip')
self.epub_f.writestr('META-INF/container.xml', self._container_xml)
self.toc = EpubTOC()
self.opf = _OPFfile()
self.opf.filelist.append(_Fileinfo('toc.ncx', False))
self.opf.filelist.append(_Fileinfo('style.css', False))
self._authors = []
self.opt_meta = {} # Optional metadata (other than authors)
self.content = ''
self.part_no = 0
self.cont_filename = 'part%03d.html' % self.part_no
def __enter__(self):
'Return self for use in with ... as ... statement.'
return self
def __exit__(self, except_type, except_val, traceback):
'Call finalize() and close the file.'
try:
self.finalize()
finally:
# Close again in case an exception happened in finalize()
self.epub_f.close()
return False
@property
def uid(self):
'''Unique identifier of the ebook. (mandatory)
If this property is left unset, a pseudo-random string will be
generated which is long enough for collisions with existing
ebooks to be extremely unlikely.'''
try:
return self._uid
except AttributeError:
import random
from string import (ascii_letters, digits)
alnum = ascii_letters + digits
self.uid = ''.join([random.choice(alnum) for i in range(15)])
return self._uid
@uid.setter
def uid(self, val):
self._uid = _EpubMeta('dc:identifier', str(val), ('id', 'uid_id'))
@property
def title(self):
'''Title of the ebook. (mandatory)
If this property is left unset, it defaults to "Untitled".'''
try:
return self._title
except AttributeError:
self.title = 'Untitled'
return self._title
@title.setter
def title(self, val):
# If val is not a string, raise TypeError now rather than later.
self._title = _EpubMeta('dc:title', '' + val)
@property
def lang(self):
'''Language of the ebook. (mandatory)
The language must be given as a lower-case two-letter code, optionally
followed by a "-" and an upper-case two-letter country code.
(e.g., "en", "en-US", "en-UK", "de", "de-DE", "de-AT")
If this property is left unset, it defaults to "en".'''
try:
return self._lang
except AttributeError:
self.lang = 'en'
return self._lang
@lang.setter
def lang(self, val):
self._lang = _EpubLang(val)
@property
def author(self):
'''Name of the author. (optional)
If there are multiple authors, pass a list of strings.
To control the file-as and role attribute, use author objects instead
of strings; file-as is an alternate form of the name used for sorting.
For a description of the role attribute, see the docstring of the
author class.'''
if len(self._authors) == 1:
return self._authors[0]
return tuple([aut for aut in self._authors])
@author.setter
def author(self, val):
        if isinstance(val, (Author, str)):
authors = [val]
else:
authors = val
for aut in authors:
try:
self._authors.append(Author('' + aut))
except TypeError:
# aut is not a string, so it should be an Author object
self._authors.append(aut)
@author.deleter
def author(self):
self._authors = []
@property
def date(self):
'''Publication date. (optional)
Must be given in "YYYY[-MM[-DD]]" format.'''
try:
return self.opt_meta['date']
except KeyError:
return None
@date.setter
def date(self, val):
self.opt_meta['date'] = _EpubDate(val)
    @date.deleter
    def date(self):
        del self.opt_meta['date']
@property
def rights(self):
'Copyright/licensing information. (optional)'
try:
return self.opt_meta['rights']
except KeyError:
return None
@rights.setter
def rights(self, val):
self.opt_meta['rights'] = _EpubMeta('dc:rights', '' + val)
    @rights.deleter
    def rights(self):
        del self.opt_meta['rights']
@property
def publisher(self):
'Publisher name. (optional)'
try:
return self.opt_meta['publisher']
except KeyError:
return None
@publisher.setter
def publisher(self, val):
self.opt_meta['publisher'] = _EpubMeta('dc:publisher', '' + val)
    @publisher.deleter
    def publisher(self):
        del self.opt_meta['publisher']
@property
def style_css(self):
'''CSS stylesheet for the files that are generated by the EpubBuilder
instance. Can be overwritten or extended, but not deleted.'''
return self._style_css
@style_css.setter
def style_css(self, val):
self._style_css = '' + val
def titlepage(self, main_title = None, subtitle = None):
'''Create a title page for the ebook. If no main_title is given,
the title attribute of the EpubBuilder instance is used.'''
tp = '<div class="getebook-tp">\n'
if len(self._authors) >= 1:
if len(self._authors) == 1:
aut_str = str(self._authors[0])
else:
                aut_str = ', '.join(str(aut) for aut in self._authors[0:-1]) \
                    + ', and ' + str(self._authors[-1])
tp += '<div class="getebook-tp-authors">%s</div>\n' % aut_str
if not main_title:
main_title = str(self.title)
tp += '<div class="getebook-tp-title">%s' % main_title
if subtitle:
tp += '<div class="getebook-tp-sub">%s</div>' % subtitle
tp += '</div>\n</div>\n'
self.opf.filelist.insert(0, _Fileinfo('title.html',
guide_title = 'Titlepage', guide_type = 'title-page'))
self.epub_f.writestr('title.html', self._html.format(self.title, tp))
def headingpage(self, heading, subtitle = None, toc_text = None):
'''Create a page containing only a (large) heading, optionally
with a smaller subtitle. If toc_text is not given, it defaults
to the heading.'''
self.new_part()
tag = 'h%d' % min(6, self.toc.depth)
self.content += '<div class="getebook-tp">'
self.content += '<{} class="getebook-tp-title">{}'.format(tag, heading)
if subtitle:
self.content += '<div class="getebook-tp-sub">%s</div>' % subtitle
        self.content += '</%s>\n</div>\n' % tag
if not toc_text:
toc_text = heading
self.toc.new_entry(toc_text, self.cont_filename)
self.new_part()
def insert_file(self, name, in_spine = False, guide_title = None,
guide_type = None, arcname = None):
'''Include an external file into the ebook. By default, it will
be added to the archive under its basename; the argument
"arcname" can be used to specify a different name.'''
if not arcname:
arcname = os.path.basename(name)
self.opf.filelist.append(_Fileinfo(arcname, in_spine, guide_title,
guide_type))
self.epub_f.write(name, arcname)
def add_file(self, arcname, str_or_bytes, in_spine = False,
guide_title = None, guide_type = None):
'''Add the string or bytes instance str_or_bytes to the archive
under the name arcname.'''
self.opf.filelist.append(_Fileinfo(arcname, in_spine, guide_title,
guide_type))
self.epub_f.writestr(arcname, str_or_bytes)
def false_heading(self, elem):
'''Handle a "false heading", i.e., text that appears in heading
tags in the source even though it is not a chapter heading.'''
elem.attrs['class'] = 'getebook-false-h'
elem.tag = 'p'
self.handle_elem(elem)
def _heading(self, elem):
'''Write a heading.'''
# Handle paragraph heading if we have one waiting (see the
        # par_heading method). We don't use _handle_par_h here because
# we merge it with the subsequent proper heading.
try:
par_h = self.par_h
del self.par_h
except AttributeError:
toc_text = elem.text
else:
# There is a waiting paragraph heading, we merge it with the
# new heading.
toc_text = par_h.text + '. ' + elem.text
par_h.tag = 'div'
par_h.attrs['class'] = 'getebook-small-h'
elem.children.insert(0, par_h)
# Set the class attribute value.
elem.attrs['class'] = 'getebook-chapter-h'
self.toc.new_entry(toc_text, self.cont_filename)
# Add heading to the epub.
tag = 'h%d' % min(self.toc.depth, 6)
self.content += _make_starttag(tag, elem.attrs)
        for child in elem.children:
            self.handle_elem(child)
self.content += '</%s>\n' % tag
def par_heading(self, elem):
'''Handle a "paragraph heading", i.e., a chaper heading or part
of a chapter heading inside paragraph tags. If it is immediately
followed by a heading, they will be merged into one.'''
self.par_h = elem
def _handle_par_h(self):
'Check if there is a waiting paragraph heading and handle it.'
try:
self._heading(self.par_h)
except AttributeError:
pass
def handle_elem(self, elem):
'Handle html element as supplied by getebook.EbookParser.'
try:
tag = elem.tag
except AttributeError:
# elem should be a string
is_string = True
tag = None
else:
is_string = False
if tag in getebook._headings:
self._heading(elem)
else:
# Handle waiting par_h if necessary (see par_heading)
try:
self._heading(self.par_h)
except AttributeError:
pass
if is_string:
self.content += elem
elif tag == 'br':
self.content += '<br />\n'
elif tag == 'img':
self.content += self._handle_image(elem.attrs) + '\n'
elif tag == 'a' or tag == 'noscript':
# Ignore tag, just write child elements
for child in elem.children:
self.handle_elem(child)
else:
self.content += _make_starttag(tag, elem.attrs)
for child in elem.children:
self.handle_elem(child)
self.content += '</%s>' % tag
if tag == 'p':
self.content += '\n'
def _handle_image(self, attrs):
'Returns the alt text of an image tag.'
try:
return attrs['alt']
except KeyError:
return ''
def new_part(self):
'''Begin a new part of the epub. Write the current html document
to the archive and begin a new one.'''
# Handle waiting par_h (see par_heading)
try:
self._heading(self.par_h)
except AttributeError:
pass
if self.content:
html = self._html.format(self.title, self.content)
self.epub_f.writestr(self.cont_filename, html)
self.part_no += 1
self.content = ''
self.cont_filename = 'part%03d.html' % self.part_no
self.opf.filelist.append(_Fileinfo(self.cont_filename))
def finalize(self):
'Complete and close the epub file.'
# Handle waiting par_h (see par_heading)
if self._finalized:
# Avoid finalizing twice. Otherwise, calling finalize inside
# a with-block would lead to an exception when __exit__
# calls finalize again.
return
try:
self._heading(self.par_h)
except AttributeError:
pass
if self.content:
html = self._html.format(self.title, self.content)
self.epub_f.writestr(self.cont_filename, html)
self.opf.meta = [self.uid, self.lang, self.title] + self._authors
self.opf.meta += self.opt_meta.values()
self.epub_f.writestr('package.opf', self.opf.write_xml())
self.epub_f.writestr('toc.ncx',
self.toc.write_xml(self.uid, self.title, self._authors))
self.epub_f.writestr('style.css', self._style_css)
self.epub_f.close()
self._finalized = True
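# A minimal usage sketch of the API above (filename and metadata are
# placeholders):
#
#     with EpubBuilder('example.epub') as bld:
#         bld.title = 'An Example'
#         bld.author = 'Jane Doe'
#         bld.lang = 'en'
#         bld.titlepage()
#         bld.headingpage('Chapter One')
#         bld.handle_elem('Some text.')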
|
import numpy as np
import pandas as pd
from pandas import Series, DataFrame
from scipy.spatial import distance
import matplotlib.pyplot as plt
from sklearn.cluster import DBSCAN
from sklearn import metrics
from sklearn.datasets import make_blobs
from sklearn.preprocessing import StandardScaler
from sklearn import decomposition # PCA
from sklearn.metrics import confusion_matrix
import json
import ml.Features as ft
from utils import Utils
class Identifier(object):
def __init__(self):
        columns = ['mean_height', 'min_height', 'max_height', 'mean_width', 'min_width', 'max_width', 'time', 'girth', 'id']
self.data = DataFrame(columns=columns)
self.event = []
@staticmethod
def subscribe(ch, method, properties, body):
"""
prints the body message. It's the default callback method
:param ch: keep null
:param method: keep null
:param properties: keep null
:param body: the message
:return:
"""
        # First we get the JSON from the body,
        # then we check whether it is part of the walking event;
        # if the walking event is completed, we ...
        print(body)
if __name__ == '__main__':
# we setup needed params
MAX_HEIGHT = 203
MAX_WIDTH = 142
SPEED = 3
SAMPLING_RATE = 8
mq_host = '172.26.56.122'
queue_name = 'door_data'
# setting up MQTT subscriber
    Utils.sub(queue_name=queue_name, callback=Identifier.subscribe, host=mq_host)
|
import time
def retries(times=3, timeout=1):
"""对未捕获异常进行重试"""
def decorator(func):
def _wrapper(*args, **kw):
att, retry = 0, 0
while retry < times:
retry += 1
try:
return func(*args, **kw)
                except Exception:
                    att += timeout
                    if retry < times:
                        time.sleep(att)
                    else:
                        raise
return _wrapper
return decorator
def empty_content_retries(times=3, timeout=2):
"""响应为空的进行重试"""
def decorator(func):
def _wrapper(*args, **kw):
att, retry = 0, 0
while retry < times:
retry += 1
ret = func(*args, **kw)
if ret:
return ret
att += timeout
time.sleep(att)
return _wrapper
return decorator
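# Usage sketch (hypothetical functions): retry a flaky call on exceptions,
# or retry until a call returns a non-empty result.
#
#     @retries(times=3, timeout=1)
#     def flaky_call():
#         ...
#
#     @empty_content_retries()
#     def fetch():
#         ...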
def use_logging(level):
"""带参数的装饰器"""
def decorator(func):
        print(func.__name__)
def wrapper(*args, **kwargs):
if level == "warn":
print ("level:%s, %s is running" % (level, func.__name__))
elif level == "info":
print ("level:%s, %s is running" % (level, func.__name__))
return func(*args, **kwargs)
return wrapper
return decorator
if __name__ == "__main__":
@use_logging(level="warn")
def foo(name='foo'):
print("i am %s" % name)
foo()
|
"""
********************************************************************
Test file for implementation check of CR3BP library.
********************************************************************
Last update: 21/01/2022
Description
-----------
Contains a few sample orbit propagations to test the CR3BP library.
The orbits currently found in test file include:
- L2 southern NRHO (9:2 NRHO of Lunar Gateway Station)
- Distant Retrograde Orbit (DRO)
- Butterfly Orbit
- L2 Vertical Orbit
"""
import matplotlib.pyplot as plt
import numpy as np
from astropy import units as u
from CR3BP import getChar_CR3BP, propagate, propagateSTM
from poliastro.bodies import Earth, Moon
k1 = Earth.k.to(u.km**3 / u.s**2).value
k2 = Moon.k.to(u.km**3 / u.s**2).value
r12 = 384747.99198  # Earth-Moon distance [km]
mu, kstr, lstr, tstr, vstr, nstr = getChar_CR3BP(k1, k2, r12)
"""
The orbit is a Near-Rectilinear Halo Orbit (NRHO) around the L2 Lagrangian
point of the Earth-Moon system. The orbit presented here is a southern
sub-family of the L2-NRHO. This orbit is the 9:2 resonant orbit currently set
as the candidate orbit for the Lunar Gateway Station (LOP-G). It is called
9:2 resonant since a spacecraft completes 9 orbits in the NRHO for every
2 lunar months (slightly different from the lunar orbit period).
The exact orbital elements presented here are from the author's simulations.
The orbit states were obtained starting from guess solutions given in various
references. A few are provided below:
Ref: White Paper: Gateway Destination Orbit Model: A Continuous 15 Year NRHO
Reference Trajectory - NASA, 2019
Ref: Strategies for Low-Thrust Transfer Design Based on Direct Collocation
Techniques - Park, Howell and Folta
The NRHOs are a subfamily of the Halo orbits. The 'Near-Rectilinear' term
comes from the very elongated shape of the orbit compared to a regular Halo.
Halo orbits occur at all three collinear equilibrium points L1, L2 and L3,
and they come in a pair of variants (northern and southern) due to the
symmetry of the CR3BP.
"""
r0 = np.array([1.021881345465263, 0, -0.182000000000000])
v0 = np.array([0, -0.102950816739606, 0])
tf = 1.509263667286943
Nplt = 300
tofs = np.linspace(0, tf, Nplt)
rf, vf = propagate(mu, r0, v0, tofs, rtol=1e-11)
rf = np.array(rf)
fig = plt.figure()
ax = plt.axes(projection="3d")
ax.set_box_aspect(
(np.ptp(rf[:, 0]), np.ptp(rf[:, 1]), np.ptp(rf[:, 2]))
) # aspect ratio is 1:1:1 in data space
ax.plot3D(1 - mu, 0, 0, "ok")
ax.set_title("L2 Southern NRHO")
ax.set_xlabel("x-axis [nd]")
ax.set_ylabel("y-axis [nd]")
ax.set_zlabel("z-axis [nd]")
ax.plot3D(rf[:, 0], rf[:, 1], rf[:, 2], "b")
plt.show()
"""
All other orbits in this section are computed from guess solutions available
in Grebow's Master's and PhD theses. He lists a quite detailed set of methods
to compute most of the major periodic orbits I have presented here. All of
them use differential correction methods, which are not yet implemented in
this library.
Ref: GENERATING PERIODIC ORBITS IN THE CIRCULAR RESTRICTED THREE-BODY PROBLEM
WITH APPLICATIONS TO LUNAR SOUTH POLE COVERAGE
- D. Grebow 2006 (Master's thesis)
Ref: TRAJECTORY DESIGN IN THE EARTH-MOON SYSTEM
AND LUNAR SOUTH POLE COVERAGE
- D. Grebow 2010 (PhD dissertation)
"""
r0 = np.array([0.783390492345344, 0, 0])
v0 = np.array([0, 0.548464515316651, 0])
tf = 3.63052604667440
Nplt = 300
tofs = np.linspace(0, tf, Nplt)
rf, vf = propagate(mu, r0, v0, tofs, rtol=1e-11)
rf = np.array(rf)
fig = plt.figure()
ax = plt.axes(projection="3d")
ax.set_box_aspect(
(np.ptp(rf[:, 0]), np.ptp(rf[:, 1]), np.ptp(rf[:, 2]))
) # aspect ratio is 1:1:1 in data space
ax.plot3D(1 - mu, 0, 0, "ok")
ax.set_title("Distant Restrograde orbit (DRO)")
ax.set_xlabel("x-axis [nd]")
ax.set_ylabel("y-axis [nd]")
ax.set_zlabel("z-axis [nd]")
ax.plot3D(rf[:, 0], rf[:, 1], rf[:, 2], "m")
plt.show()
r0 = np.array([1.03599510774957, 0, 0.173944812752286])
v0 = np.array([0, -0.0798042160573269, 0])
tf = 2.78676904546834
Nplt = 300
tofs = np.linspace(0, tf, Nplt)
rf, vf = propagate(mu, r0, v0, tofs, rtol=1e-11)
rf = np.array(rf)
fig = plt.figure()
ax = plt.axes(projection="3d")
ax.set_box_aspect(
(np.ptp(rf[:, 0]), np.ptp(rf[:, 1]), np.ptp(rf[:, 2]))
) # aspect ratio is 1:1:1 in data space
ax.plot3D(1 - mu, 0, 0, "ok")
ax.set_title("Butterfly orbit")
ax.set_xlabel("x-axis [nd]")
ax.set_ylabel("y-axis [nd]")
ax.set_zlabel("z-axis [nd]")
ax.plot3D(rf[:, 0], rf[:, 1], rf[:, 2], "r")
plt.show()
r0 = np.array([0.504689989562366, 0, 0.836429774762193])
v0 = np.array([0, 0.552722840538063, 0])
tf = 6.18448756121754
Nplt = 300
tofs = np.linspace(0, tf, Nplt)
rf, vf = propagate(mu, r0, v0, tofs, rtol=1e-11)
rf = np.array(rf)
fig = plt.figure()
ax = plt.axes(projection="3d")
ax.set_box_aspect(
(np.ptp(rf[:, 0]), np.ptp(rf[:, 1]), np.ptp(rf[:, 2]))
) # aspect ratio is 1:1:1 in data space
ax.plot3D(1 - mu, 0, 0, "ok")
ax.set_title("L2 Vertical orbit")
ax.set_xlabel("x-axis [nd]")
ax.set_ylabel("y-axis [nd]")
ax.set_zlabel("z-axis [nd]")
ax.plot3D(rf[:, 0], rf[:, 1], rf[:, 2], "g")
plt.show()
STM0 = np.eye(6)
rf, vf, STM = propagateSTM(mu, r0, v0, STM0, tofs, rtol=1e-11)
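# The propagated STM is the 6x6 state transition matrix along the orbit;
# evaluated over one full period it is the monodromy matrix, whose
# eigenvalues characterize the linear stability of the periodic orbit.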
|
FAILEDOPERATION = 'FailedOperation'
FAILEDOPERATION_APIGATEWAY = 'FailedOperation.ApiGateway'
FAILEDOPERATION_APIGW = 'FailedOperation.Apigw'
FAILEDOPERATION_APMCONFIGINSTANCEID = 'FailedOperation.ApmConfigInstanceId'
FAILEDOPERATION_ASYNCEVENTSTATUS = 'FailedOperation.AsyncEventStatus'
FAILEDOPERATION_COPYFAILED = 'FailedOperation.CopyFailed'
FAILEDOPERATION_COPYFUNCTION = 'FailedOperation.CopyFunction'
FAILEDOPERATION_COS = 'FailedOperation.Cos'
FAILEDOPERATION_CREATEALIAS = 'FailedOperation.CreateAlias'
FAILEDOPERATION_CREATEFUNCTION = 'FailedOperation.CreateFunction'
FAILEDOPERATION_CREATENAMESPACE = 'FailedOperation.CreateNamespace'
FAILEDOPERATION_CREATETRIGGER = 'FailedOperation.CreateTrigger'
FAILEDOPERATION_DEBUGMODESTATUS = 'FailedOperation.DebugModeStatus'
FAILEDOPERATION_DEBUGMODEUPDATETIMEOUTFAIL = 'FailedOperation.DebugModeUpdateTimeOutFail'
FAILEDOPERATION_DELETEALIAS = 'FailedOperation.DeleteAlias'
FAILEDOPERATION_DELETEFUNCTION = 'FailedOperation.DeleteFunction'
FAILEDOPERATION_DELETELAYERVERSION = 'FailedOperation.DeleteLayerVersion'
FAILEDOPERATION_DELETENAMESPACE = 'FailedOperation.DeleteNamespace'
FAILEDOPERATION_DELETETRIGGER = 'FailedOperation.DeleteTrigger'
FAILEDOPERATION_FUNCTIONNAMESTATUSERROR = 'FailedOperation.FunctionNameStatusError'
FAILEDOPERATION_FUNCTIONSTATUSERROR = 'FailedOperation.FunctionStatusError'
FAILEDOPERATION_FUNCTIONVERSIONSTATUSNOTACTIVE = 'FailedOperation.FunctionVersionStatusNotActive'
FAILEDOPERATION_GETALIAS = 'FailedOperation.GetAlias'
FAILEDOPERATION_GETFUNCTIONADDRESS = 'FailedOperation.GetFunctionAddress'
FAILEDOPERATION_INSUFFICIENTBALANCE = 'FailedOperation.InsufficientBalance'
FAILEDOPERATION_INVOKEFUNCTION = 'FailedOperation.InvokeFunction'
FAILEDOPERATION_NAMESPACE = 'FailedOperation.Namespace'
FAILEDOPERATION_OPENSERVICE = 'FailedOperation.OpenService'
FAILEDOPERATION_OPERATIONCONFLICT = 'FailedOperation.OperationConflict'
FAILEDOPERATION_PROVISIONCREATETIMER = 'FailedOperation.ProvisionCreateTimer'
FAILEDOPERATION_PROVISIONDELETETIMER = 'FailedOperation.ProvisionDeleteTimer'
FAILEDOPERATION_PROVISIONEDINPROGRESS = 'FailedOperation.ProvisionedInProgress'
FAILEDOPERATION_PUBLISHLAYERVERSION = 'FailedOperation.PublishLayerVersion'
FAILEDOPERATION_PUBLISHVERSION = 'FailedOperation.PublishVersion'
FAILEDOPERATION_QCSROLENOTFOUND = 'FailedOperation.QcsRoleNotFound'
FAILEDOPERATION_RESERVEDINPROGRESS = 'FailedOperation.ReservedInProgress'
FAILEDOPERATION_TOPICNOTEXIST = 'FailedOperation.TopicNotExist'
FAILEDOPERATION_TOTALCONCURRENCYMEMORYINPROGRESS = 'FailedOperation.TotalConcurrencyMemoryInProgress'
FAILEDOPERATION_UNOPENEDSERVICE = 'FailedOperation.UnOpenedService'
FAILEDOPERATION_UPDATEALIAS = 'FailedOperation.UpdateAlias'
FAILEDOPERATION_UPDATEFUNCTIONCODE = 'FailedOperation.UpdateFunctionCode'
FAILEDOPERATION_UPDATEFUNCTIONCONFIGURATION = 'FailedOperation.UpdateFunctionConfiguration'
INTERNALERROR = 'InternalError'
INTERNALERROR_APIGATEWAY = 'InternalError.ApiGateway'
INTERNALERROR_CKAFKA = 'InternalError.Ckafka'
INTERNALERROR_CMQ = 'InternalError.Cmq'
INTERNALERROR_COS = 'InternalError.Cos'
INTERNALERROR_ES = 'InternalError.ES'
INTERNALERROR_EXCEPTION = 'InternalError.Exception'
INTERNALERROR_GETROLEERROR = 'InternalError.GetRoleError'
INTERNALERROR_SYSTEM = 'InternalError.System'
INTERNALERROR_SYSTEMERROR = 'InternalError.SystemError'
INVALIDPARAMETER_FUNCTIONNAME = 'InvalidParameter.FunctionName'
INVALIDPARAMETER_PAYLOAD = 'InvalidParameter.Payload'
INVALIDPARAMETER_ROUTINGCONFIG = 'InvalidParameter.RoutingConfig'
INVALIDPARAMETERVALUE = 'InvalidParameterValue'
INVALIDPARAMETERVALUE_ACTION = 'InvalidParameterValue.Action'
INVALIDPARAMETERVALUE_ADDITIONALVERSIONWEIGHTS = 'InvalidParameterValue.AdditionalVersionWeights'
INVALIDPARAMETERVALUE_ALIAS = 'InvalidParameterValue.Alias'
INVALIDPARAMETERVALUE_APIGATEWAY = 'InvalidParameterValue.ApiGateway'
INVALIDPARAMETERVALUE_APMCONFIG = 'InvalidParameterValue.ApmConfig'
INVALIDPARAMETERVALUE_APMCONFIGINSTANCEID = 'InvalidParameterValue.ApmConfigInstanceId'
INVALIDPARAMETERVALUE_APMCONFIGREGION = 'InvalidParameterValue.ApmConfigRegion'
INVALIDPARAMETERVALUE_ARGS = 'InvalidParameterValue.Args'
INVALIDPARAMETERVALUE_ASYNCTRIGGERCONFIG = 'InvalidParameterValue.AsyncTriggerConfig'
INVALIDPARAMETERVALUE_CDN = 'InvalidParameterValue.Cdn'
INVALIDPARAMETERVALUE_CFSPARAMETERDUPLICATE = 'InvalidParameterValue.CfsParameterDuplicate'
INVALIDPARAMETERVALUE_CFSPARAMETERERROR = 'InvalidParameterValue.CfsParameterError'
INVALIDPARAMETERVALUE_CFSSTRUCTIONERROR = 'InvalidParameterValue.CfsStructionError'
INVALIDPARAMETERVALUE_CKAFKA = 'InvalidParameterValue.Ckafka'
INVALIDPARAMETERVALUE_CLIENTCONTEXT = 'InvalidParameterValue.ClientContext'
INVALIDPARAMETERVALUE_CLS = 'InvalidParameterValue.Cls'
INVALIDPARAMETERVALUE_CLSROLE = 'InvalidParameterValue.ClsRole'
INVALIDPARAMETERVALUE_CMQ = 'InvalidParameterValue.Cmq'
INVALIDPARAMETERVALUE_CODE = 'InvalidParameterValue.Code'
INVALIDPARAMETERVALUE_CODESECRET = 'InvalidParameterValue.CodeSecret'
INVALIDPARAMETERVALUE_CODESOURCE = 'InvalidParameterValue.CodeSource'
INVALIDPARAMETERVALUE_COMMAND = 'InvalidParameterValue.Command'
INVALIDPARAMETERVALUE_COMPATIBLERUNTIMES = 'InvalidParameterValue.CompatibleRuntimes'
INVALIDPARAMETERVALUE_CONTENT = 'InvalidParameterValue.Content'
INVALIDPARAMETERVALUE_COS = 'InvalidParameterValue.Cos'
INVALIDPARAMETERVALUE_COSBUCKETNAME = 'InvalidParameterValue.CosBucketName'
INVALIDPARAMETERVALUE_COSBUCKETREGION = 'InvalidParameterValue.CosBucketRegion'
INVALIDPARAMETERVALUE_COSOBJECTNAME = 'InvalidParameterValue.CosObjectName'
INVALIDPARAMETERVALUE_CUSTOMARGUMENT = 'InvalidParameterValue.CustomArgument'
INVALIDPARAMETERVALUE_DATETIME = 'InvalidParameterValue.DateTime'
INVALIDPARAMETERVALUE_DEADLETTERCONFIG = 'InvalidParameterValue.DeadLetterConfig'
INVALIDPARAMETERVALUE_DEFAULTNAMESPACE = 'InvalidParameterValue.DefaultNamespace'
INVALIDPARAMETERVALUE_DESCRIPTION = 'InvalidParameterValue.Description'
INVALIDPARAMETERVALUE_DNSINFO = 'InvalidParameterValue.DnsInfo'
INVALIDPARAMETERVALUE_EIPCONFIG = 'InvalidParameterValue.EipConfig'
INVALIDPARAMETERVALUE_ENABLE = 'InvalidParameterValue.Enable'
INVALIDPARAMETERVALUE_ENVIRONMENT = 'InvalidParameterValue.Environment'
INVALIDPARAMETERVALUE_ENVIRONMENTEXCEEDEDLIMIT = 'InvalidParameterValue.EnvironmentExceededLimit'
INVALIDPARAMETERVALUE_ENVIRONMENTSYSTEMPROTECT = 'InvalidParameterValue.EnvironmentSystemProtect'
INVALIDPARAMETERVALUE_FILTERS = 'InvalidParameterValue.Filters'
INVALIDPARAMETERVALUE_FUNCTION = 'InvalidParameterValue.Function'
INVALIDPARAMETERVALUE_FUNCTIONNAME = 'InvalidParameterValue.FunctionName'
INVALIDPARAMETERVALUE_GITBRANCH = 'InvalidParameterValue.GitBranch'
INVALIDPARAMETERVALUE_GITCOMMITID = 'InvalidParameterValue.GitCommitId'
INVALIDPARAMETERVALUE_GITDIRECTORY = 'InvalidParameterValue.GitDirectory'
INVALIDPARAMETERVALUE_GITPASSWORD = 'InvalidParameterValue.GitPassword'
INVALIDPARAMETERVALUE_GITURL = 'InvalidParameterValue.GitUrl'
INVALIDPARAMETERVALUE_GITUSERNAME = 'InvalidParameterValue.GitUserName'
INVALIDPARAMETERVALUE_HANDLER = 'InvalidParameterValue.Handler'
INVALIDPARAMETERVALUE_IDLETIMEOUT = 'InvalidParameterValue.IdleTimeOut'
INVALIDPARAMETERVALUE_IMAGEURI = 'InvalidParameterValue.ImageUri'
INVALIDPARAMETERVALUE_INLINEZIPFILE = 'InvalidParameterValue.InlineZipFile'
INVALIDPARAMETERVALUE_INVOKETYPE = 'InvalidParameterValue.InvokeType'
INVALIDPARAMETERVALUE_L5ENABLE = 'InvalidParameterValue.L5Enable'
INVALIDPARAMETERVALUE_LAYERNAME = 'InvalidParameterValue.LayerName'
INVALIDPARAMETERVALUE_LAYERS = 'InvalidParameterValue.Layers'
INVALIDPARAMETERVALUE_LIMIT = 'InvalidParameterValue.Limit'
INVALIDPARAMETERVALUE_LIMITEXCEEDED = 'InvalidParameterValue.LimitExceeded'
INVALIDPARAMETERVALUE_MEMORY = 'InvalidParameterValue.Memory'
INVALIDPARAMETERVALUE_MEMORYSIZE = 'InvalidParameterValue.MemorySize'
INVALIDPARAMETERVALUE_MINCAPACITY = 'InvalidParameterValue.MinCapacity'
INVALIDPARAMETERVALUE_NAME = 'InvalidParameterValue.Name'
INVALIDPARAMETERVALUE_NAMESPACE = 'InvalidParameterValue.Namespace'
INVALIDPARAMETERVALUE_NAMESPACEINVALID = 'InvalidParameterValue.NamespaceInvalid'
INVALIDPARAMETERVALUE_NODESPEC = 'InvalidParameterValue.NodeSpec'
INVALIDPARAMETERVALUE_NODETYPE = 'InvalidParameterValue.NodeType'
INVALIDPARAMETERVALUE_OFFSET = 'InvalidParameterValue.Offset'
INVALIDPARAMETERVALUE_ORDER = 'InvalidParameterValue.Order'
INVALIDPARAMETERVALUE_ORDERBY = 'InvalidParameterValue.OrderBy'
INVALIDPARAMETERVALUE_PARAM = 'InvalidParameterValue.Param'
INVALIDPARAMETERVALUE_PROTOCOLTYPE = 'InvalidParameterValue.ProtocolType'
INVALIDPARAMETERVALUE_PROVISIONTRIGGERCRONCONFIGDUPLICATE = 'InvalidParameterValue.ProvisionTriggerCronConfigDuplicate'
INVALIDPARAMETERVALUE_PROVISIONTRIGGERNAME = 'InvalidParameterValue.ProvisionTriggerName'
INVALIDPARAMETERVALUE_PROVISIONTRIGGERNAMEDUPLICATE = 'InvalidParameterValue.ProvisionTriggerNameDuplicate'
INVALIDPARAMETERVALUE_PROVISIONTYPE = 'InvalidParameterValue.ProvisionType'
INVALIDPARAMETERVALUE_PUBLICNETCONFIG = 'InvalidParameterValue.PublicNetConfig'
INVALIDPARAMETERVALUE_QUALIFIER = 'InvalidParameterValue.Qualifier'
INVALIDPARAMETERVALUE_REGISTRYID = 'InvalidParameterValue.RegistryId'
INVALIDPARAMETERVALUE_RETCODE = 'InvalidParameterValue.RetCode'
INVALIDPARAMETERVALUE_ROUTINGCONFIG = 'InvalidParameterValue.RoutingConfig'
INVALIDPARAMETERVALUE_RUNTIME = 'InvalidParameterValue.Runtime'
INVALIDPARAMETERVALUE_SEARCHKEY = 'InvalidParameterValue.SearchKey'
INVALIDPARAMETERVALUE_SECRETINFO = 'InvalidParameterValue.SecretInfo'
INVALIDPARAMETERVALUE_SERVICENAME = 'InvalidParameterValue.ServiceName'
INVALIDPARAMETERVALUE_STAMP = 'InvalidParameterValue.Stamp'
INVALIDPARAMETERVALUE_STARTTIME = 'InvalidParameterValue.StartTime'
INVALIDPARAMETERVALUE_STARTTIMEORENDTIME = 'InvalidParameterValue.StartTimeOrEndTime'
INVALIDPARAMETERVALUE_STATUS = 'InvalidParameterValue.Status'
INVALIDPARAMETERVALUE_SYSTEMENVIRONMENT = 'InvalidParameterValue.SystemEnvironment'
INVALIDPARAMETERVALUE_TEMPCOSOBJECTNAME = 'InvalidParameterValue.TempCosObjectName'
INVALIDPARAMETERVALUE_TRACEENABLE = 'InvalidParameterValue.TraceEnable'
INVALIDPARAMETERVALUE_TRACKINGTARGET = 'InvalidParameterValue.TrackingTarget'
INVALIDPARAMETERVALUE_TRIGGERCRONCONFIG = 'InvalidParameterValue.TriggerCronConfig'
INVALIDPARAMETERVALUE_TRIGGERCRONCONFIGTIMEINTERVAL = 'InvalidParameterValue.TriggerCronConfigTimeInterval'
INVALIDPARAMETERVALUE_TRIGGERDESC = 'InvalidParameterValue.TriggerDesc'
INVALIDPARAMETERVALUE_TRIGGERNAME = 'InvalidParameterValue.TriggerName'
INVALIDPARAMETERVALUE_TRIGGERPROVISIONEDCONCURRENCYNUM = 'InvalidParameterValue.TriggerProvisionedConcurrencyNum'
INVALIDPARAMETERVALUE_TYPE = 'InvalidParameterValue.Type'
INVALIDPARAMETERVALUE_VPCNOTSETWHENOPENCFS = 'InvalidParameterValue.VpcNotSetWhenOpenCfs'
INVALIDPARAMETERVALUE_WEBSOCKETSPARAMS = 'InvalidParameterValue.WebSocketsParams'
INVALIDPARAMETERVALUE_ZIPFILE = 'InvalidParameterValue.ZipFile'
INVALIDPARAMETERVALUE_ZIPFILEBASE64BINASCIIERROR = 'InvalidParameterValue.ZipFileBase64BinasciiError'
LIMITEXCEEDED_ALIAS = 'LimitExceeded.Alias'
LIMITEXCEEDED_CDN = 'LimitExceeded.Cdn'
LIMITEXCEEDED_EIP = 'LimitExceeded.Eip'
LIMITEXCEEDED_FUNCTION = 'LimitExceeded.Function'
LIMITEXCEEDED_FUNCTIONONTOPIC = 'LimitExceeded.FunctionOnTopic'
LIMITEXCEEDED_FUNCTIONPROVISIONEDCONCURRENCYMEMORY = 'LimitExceeded.FunctionProvisionedConcurrencyMemory'
LIMITEXCEEDED_FUNCTIONRESERVEDCONCURRENCYMEMORY = 'LimitExceeded.FunctionReservedConcurrencyMemory'
LIMITEXCEEDED_FUNCTIONTOTALPROVISIONEDCONCURRENCYMEMORY = 'LimitExceeded.FunctionTotalProvisionedConcurrencyMemory'
LIMITEXCEEDED_FUNCTIONTOTALPROVISIONEDCONCURRENCYNUM = 'LimitExceeded.FunctionTotalProvisionedConcurrencyNum'
LIMITEXCEEDED_INITTIMEOUT = 'LimitExceeded.InitTimeout'
LIMITEXCEEDED_LAYERVERSIONS = 'LimitExceeded.LayerVersions'
LIMITEXCEEDED_LAYERS = 'LimitExceeded.Layers'
LIMITEXCEEDED_MEMORY = 'LimitExceeded.Memory'
LIMITEXCEEDED_MSGTTL = 'LimitExceeded.MsgTTL'
LIMITEXCEEDED_NAMESPACE = 'LimitExceeded.Namespace'
LIMITEXCEEDED_OFFSET = 'LimitExceeded.Offset'
LIMITEXCEEDED_PROVISIONTRIGGERACTION = 'LimitExceeded.ProvisionTriggerAction'
LIMITEXCEEDED_PROVISIONTRIGGERINTERVAL = 'LimitExceeded.ProvisionTriggerInterval'
LIMITEXCEEDED_QUOTA = 'LimitExceeded.Quota'
LIMITEXCEEDED_RETRYNUM = 'LimitExceeded.RetryNum'
LIMITEXCEEDED_TIMEOUT = 'LimitExceeded.Timeout'
LIMITEXCEEDED_TOTALCONCURRENCYMEMORY = 'LimitExceeded.TotalConcurrencyMemory'
LIMITEXCEEDED_TRIGGER = 'LimitExceeded.Trigger'
LIMITEXCEEDED_USERTOTALCONCURRENCYMEMORY = 'LimitExceeded.UserTotalConcurrencyMemory'
MISSINGPARAMETER = 'MissingParameter'
MISSINGPARAMETER_CODE = 'MissingParameter.Code'
MISSINGPARAMETER_RUNTIME = 'MissingParameter.Runtime'
RESOURCEINUSE = 'ResourceInUse'
RESOURCEINUSE_ALIAS = 'ResourceInUse.Alias'
RESOURCEINUSE_CDN = 'ResourceInUse.Cdn'
RESOURCEINUSE_CMQ = 'ResourceInUse.Cmq'
RESOURCEINUSE_COS = 'ResourceInUse.Cos'
RESOURCEINUSE_FUNCTION = 'ResourceInUse.Function'
RESOURCEINUSE_FUNCTIONNAME = 'ResourceInUse.FunctionName'
RESOURCEINUSE_LAYERVERSION = 'ResourceInUse.LayerVersion'
RESOURCEINUSE_NAMESPACE = 'ResourceInUse.Namespace'
RESOURCEINUSE_TRIGGER = 'ResourceInUse.Trigger'
RESOURCEINUSE_TRIGGERNAME = 'ResourceInUse.TriggerName'
RESOURCEINSUFFICIENT_COS = 'ResourceInsufficient.COS'
RESOURCENOTFOUND = 'ResourceNotFound'
RESOURCENOTFOUND_ALIAS = 'ResourceNotFound.Alias'
RESOURCENOTFOUND_ASYNCEVENT = 'ResourceNotFound.AsyncEvent'
RESOURCENOTFOUND_CDN = 'ResourceNotFound.Cdn'
RESOURCENOTFOUND_CFSMOUNTINSNOTMATCH = 'ResourceNotFound.CfsMountInsNotMatch'
RESOURCENOTFOUND_CFSSTATUSERROR = 'ResourceNotFound.CfsStatusError'
RESOURCENOTFOUND_CFSVPCNOTMATCH = 'ResourceNotFound.CfsVpcNotMatch'
RESOURCENOTFOUND_CKAFKA = 'ResourceNotFound.Ckafka'
RESOURCENOTFOUND_CMQ = 'ResourceNotFound.Cmq'
RESOURCENOTFOUND_COS = 'ResourceNotFound.Cos'
RESOURCENOTFOUND_DEMO = 'ResourceNotFound.Demo'
RESOURCENOTFOUND_FUNCTION = 'ResourceNotFound.Function'
RESOURCENOTFOUND_FUNCTIONNAME = 'ResourceNotFound.FunctionName'
RESOURCENOTFOUND_FUNCTIONVERSION = 'ResourceNotFound.FunctionVersion'
RESOURCENOTFOUND_GETCFSMOUNTINSERROR = 'ResourceNotFound.GetCfsMountInsError'
RESOURCENOTFOUND_GETCFSNOTMATCH = 'ResourceNotFound.GetCfsNotMatch'
RESOURCENOTFOUND_IMAGECONFIG = 'ResourceNotFound.ImageConfig'
RESOURCENOTFOUND_LAYER = 'ResourceNotFound.Layer'
RESOURCENOTFOUND_LAYERVERSION = 'ResourceNotFound.LayerVersion'
RESOURCENOTFOUND_NAMESPACE = 'ResourceNotFound.Namespace'
RESOURCENOTFOUND_QUALIFIER = 'ResourceNotFound.Qualifier'
RESOURCENOTFOUND_ROLE = 'ResourceNotFound.Role'
RESOURCENOTFOUND_ROLECHECK = 'ResourceNotFound.RoleCheck'
RESOURCENOTFOUND_TIMER = 'ResourceNotFound.Timer'
RESOURCENOTFOUND_TOTALCONCURRENCYMEMORY = 'ResourceNotFound.TotalConcurrencyMemory'
RESOURCENOTFOUND_TRIGGER = 'ResourceNotFound.Trigger'
RESOURCENOTFOUND_VERSION = 'ResourceNotFound.Version'
RESOURCENOTFOUND_VPC = 'ResourceNotFound.Vpc'
RESOURCEUNAVAILABLE_INSUFFICIENTBALANCE = 'ResourceUnavailable.InsufficientBalance'
RESOURCEUNAVAILABLE_NAMESPACE = 'ResourceUnavailable.Namespace'
UNAUTHORIZEDOPERATION = 'UnauthorizedOperation'
UNAUTHORIZEDOPERATION_CAM = 'UnauthorizedOperation.CAM'
UNAUTHORIZEDOPERATION_CODESECRET = 'UnauthorizedOperation.CodeSecret'
UNAUTHORIZEDOPERATION_CREATETRIGGER = 'UnauthorizedOperation.CreateTrigger'
UNAUTHORIZEDOPERATION_DELETEFUNCTION = 'UnauthorizedOperation.DeleteFunction'
UNAUTHORIZEDOPERATION_DELETETRIGGER = 'UnauthorizedOperation.DeleteTrigger'
UNAUTHORIZEDOPERATION_NOTMC = 'UnauthorizedOperation.NotMC'
UNAUTHORIZEDOPERATION_REGION = 'UnauthorizedOperation.Region'
UNAUTHORIZEDOPERATION_ROLE = 'UnauthorizedOperation.Role'
UNAUTHORIZEDOPERATION_TEMPCOSAPPID = 'UnauthorizedOperation.TempCosAppid'
UNAUTHORIZEDOPERATION_UPDATEFUNCTIONCODE = 'UnauthorizedOperation.UpdateFunctionCode'
UNSUPPORTEDOPERATION = 'UnsupportedOperation'
UNSUPPORTEDOPERATION_ALIASBIND = 'UnsupportedOperation.AliasBind'
UNSUPPORTEDOPERATION_ASYNCRUNENABLE = 'UnsupportedOperation.AsyncRunEnable'
UNSUPPORTEDOPERATION_CDN = 'UnsupportedOperation.Cdn'
UNSUPPORTEDOPERATION_COS = 'UnsupportedOperation.Cos'
UNSUPPORTEDOPERATION_EIPFIXED = 'UnsupportedOperation.EipFixed'
UNSUPPORTEDOPERATION_REGION = 'UnsupportedOperation.Region'
UNSUPPORTEDOPERATION_TRIGGER = 'UnsupportedOperation.Trigger'
UNSUPPORTEDOPERATION_UPDATEFUNCTIONEVENTINVOKECONFIG = 'UnsupportedOperation.UpdateFunctionEventInvokeConfig'
UNSUPPORTEDOPERATION_VPCCONFIG = 'UnsupportedOperation.VpcConfig'
|
from runner.koan import *
class AboutIteration(Koan):
def test_iterators_are_a_type(self):
it = iter(range(1,6))
total = 0
for num in it:
total += num
        self.assertEqual(15, total)
def test_iterating_with_next(self):
stages = iter(['alpha','beta','gamma'])
try:
self.assertEqual('alpha', next(stages))
next(stages)
self.assertEqual('gamma', next(stages))
next(stages)
except StopIteration as ex:
err_msg = 'Ran out of iterations'
self.assertRegex(err_msg, 'Ran out')
# ------------------------------------------------------------------
def add_ten(self, item):
return item + 10
def test_map_transforms_elements_of_a_list(self):
seq = [1, 2, 3]
mapped_seq = list()
mapping = map(self.add_ten, seq)
self.assertNotEqual(list, mapping.__class__)
self.assertEqual(map, mapping.__class__)
        # In Python 3, built-in functions like map() return lazy
        # iterator objects instead of lists
for item in mapping:
mapped_seq.append(item)
self.assertEqual([11, 12, 13], mapped_seq)
        # Note: map() returns a map iterator object in Python 3.
        # In Python 2, map() would give you a list.
def test_filter_selects_certain_items_from_a_list(self):
def is_even(item):
return (item % 2) == 0
seq = [1, 2, 3, 4, 5, 6]
even_numbers = list()
for item in filter(is_even, seq):
even_numbers.append(item)
        self.assertEqual([2, 4, 6], even_numbers)
def test_just_return_first_item_found(self):
def is_big_name(item):
return len(item) > 4
names = ["Jim", "Bill", "Clarence", "Doug", "Eli"]
name = None
iterator = filter(is_big_name, names)
try:
name = next(iterator)
except StopIteration:
msg = 'Ran out of big names'
self.assertEqual("Clarence", name)
# ------------------------------------------------------------------
def add(self,accum,item):
return accum + item
def multiply(self,accum,item):
return accum * item
def test_reduce_will_blow_your_mind(self):
import functools
# As of Python 3 reduce() has been demoted from a builtin function
# to the functools module.
result = functools.reduce(self.add, [2, 3, 4])
self.assertEqual(int, result.__class__)
        # The reduce() call syntax is the same as in Python 2.
self.assertEqual(9, result)
result2 = functools.reduce(self.multiply, [2, 3, 4], 1)
self.assertEqual(24, result2)
# Extra Credit:
# Describe in your own words what reduce does.
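        # (A folding summary, for reference: reduce(fn, seq[, initial])
        # repeatedly applies fn(accumulator, item) left-to-right, so
        # functools.reduce(self.add, [2, 3, 4]) == add(add(2, 3), 4) == 9.
        # The optional third argument seeds the accumulator.)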
# ------------------------------------------------------------------
def test_use_pass_for_iterations_with_no_body(self):
for num in range(1,5):
pass
self.assertEqual(4, num)
# ------------------------------------------------------------------
def test_all_iteration_methods_work_on_any_sequence_not_just_lists(self):
# Ranges are an iterable sequence
result = map(self.add_ten, range(1,4))
self.assertEqual([11, 12, 13], list(result))
try:
file = open("example_file.txt")
try:
def make_upcase(line):
return line.strip().upper()
upcase_lines = map(make_upcase, file.readlines())
self.assertEqual(["THIS", "IS", "A", "TEST"] , list(upcase_lines))
finally:
# Arg, this is ugly.
# We will figure out how to fix this later.
file.close()
except IOError:
# should never happen
self.fail()
|
from api_request import Api
from util import Util
from twocheckout import Twocheckout
class Sale(Twocheckout):
def __init__(self, dict_):
        # name the class explicitly; super(self.__class__, ...) recurses
        # infinitely if Sale is ever subclassed
        super(Sale, self).__init__(dict_)
@classmethod
def find(cls, params=None):
if params is None:
params = dict()
response = cls(Api.call('sales/detail_sale', params))
return response.sale
@classmethod
def list(cls, params=None):
if params is None:
params = dict()
response = cls(Api.call('sales/list_sales', params))
return response.sale_summary
def refund(self, params=None):
if params is None:
params = dict()
if hasattr(self, 'lineitem_id'):
params['lineitem_id'] = self.lineitem_id
url = 'sales/refund_lineitem'
elif hasattr(self, 'invoice_id'):
params['invoice_id'] = self.invoice_id
url = 'sales/refund_invoice'
else:
params['sale_id'] = self.sale_id
url = 'sales/refund_invoice'
return Sale(Api.call(url, params))
def stop(self, params=None):
if params is None:
params = dict()
if hasattr(self, 'lineitem_id'):
params['lineitem_id'] = self.lineitem_id
return Api.call('sales/stop_lineitem_recurring', params)
elif hasattr(self, 'sale_id'):
active_lineitems = Util.active(self)
if dict(active_lineitems):
result = dict()
i = 0
for k, v in active_lineitems.items():
lineitem_id = v
params = {'lineitem_id': lineitem_id}
result[i] = Api.call('sales/stop_lineitem_recurring', params)
i += 1
response = { "response_code": "OK",
"response_message": str(len(result)) + " lineitems stopped successfully"
}
else:
response = {
"response_code": "NOTICE",
"response_message": "No active recurring lineitems"
}
else:
response = { "response_code": "NOTICE",
"response_message": "This method can only be called on a sale or lineitem"
}
return Sale(response)
def active(self):
active_lineitems = Util.active(self)
if dict(active_lineitems):
result = dict()
i = 0
for k, v in active_lineitems.items():
lineitem_id = v
result[i] = lineitem_id
i += 1
response = { "response_code": "ACTIVE",
"response_message": str(len(result)) + " active recurring lineitems"
}
else:
response = {
"response_code": "NOTICE","response_message":
"No active recurring lineitems"
}
return Sale(response)
def comment(self, params=None):
if params is None:
params = dict()
params['sale_id'] = self.sale_id
return Sale(Api.call('sales/create_comment', params))
def ship(self, params=None):
if params is None:
params = dict()
params['sale_id'] = self.sale_id
return Sale(Api.call('sales/mark_shipped', params))
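# A minimal usage sketch (not part of the library; assumes Twocheckout API
# credentials are already configured for Api, and '1234567890' is a
# placeholder sale id):
#
#     sale = Sale.find({'sale_id': '1234567890'})
#     sale.comment({'sale_comment': 'Contacted customer'})
#     sale.stop()  # stops any active recurring lineitems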
|
import json
import os
from flask import request, g, render_template, make_response, jsonify, Response
from helpers.raw_endpoint import get_id, store_json_to_file
from helpers.groups import get_groups
from json_controller import JSONController
from main import app
from pymongo import MongoClient, errors
HERE = os.path.dirname(os.path.abspath(__file__))
def connect_client():
"""Connects to Mongo client"""
try:
return MongoClient(app.config['DB_HOST'], int(app.config['DB_PORT']))
except errors.ConnectionFailure as e:
raise e
def get_db():
"""Connects to Mongo database"""
if not hasattr(g, 'mongo_client'):
g.mongo_client = connect_client()
g.mongo_db = getattr(g.mongo_client, app.config['DB_NAME'])
g.groups_collection = g.mongo_db[os.environ.get('DB_GROUPS_COLLECTION')]
return g.mongo_db
@app.teardown_appcontext
def close_db(error):
"""Closes connection with Mongo client"""
if hasattr(g, 'mongo_client'):
g.mongo_client.close()
@app.route('/')
@app.route('/index/')
def index():
"""Landing page for SciNet"""
return render_template("index.html")
@app.route('/faq/')
def faq():
"""FAQ page for SciNet"""
return render_template("faq.html")
@app.route('/leaderboard/')
def leaderboard():
"""Leaderboard page for SciNet"""
get_db()
groups = get_groups(g.groups_collection)
return render_template("leaderboard.html", groups=groups)
@app.route('/ping', methods=['POST'])
def ping_endpoint():
"""API endpoint determines potential article hash exists in db
:return: status code 204 -- hash not present, continue submission
:return: status code 201 -- hash already exists, drop submission
"""
db = get_db()
target_hash = request.form.get('hash')
if db.raw.find({'hash': target_hash}).count():
return Response(status=201)
else:
return Response(status=204)
@app.route('/articles')
def ArticleEndpoint():
"""Eventual landing page for searching/retrieving articles"""
if request.method == 'GET':
return render_template("articles.html")
@app.route('/raw', methods=['POST'])
def raw_endpoint():
"""API endpoint for submitting raw article data
:return: status code 405 - invalid JSON or invalid request type
:return: status code 400 - unsupported content-type or invalid publisher
:return: status code 201 - successful submission
"""
# Ensure post's content-type is supported
if request.headers['content-type'] == 'application/json':
# Ensure data is a valid JSON
try:
user_submission = json.loads(request.data)
except ValueError:
return Response(status=405)
# generate UID for new entry
uid = get_id()
# store incoming JSON in raw storage
file_path = os.path.join(
HERE,
'raw_payloads',
str(uid)
)
store_json_to_file(user_submission, file_path)
        # hand submission to controller and return Response
db = get_db()
controller_response = JSONController(user_submission, db=db, _id=uid).submit()
return controller_response
# User submitted an unsupported content-type
else:
return Response(status=400)
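# Example client call for /raw (hypothetical host and payload keys), e.g.:
#
#     curl -X POST http://localhost:5000/raw \
#          -H 'Content-Type: application/json' \
#          -d '{"publisher": "...", "hash": "..."}'
#
# Per the docstring above, 201 indicates a successful submission, 400 an
# unsupported content-type, and 405 an invalid JSON body.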
@app.route('/requestnewgroup/', methods=['POST'])
def request_new_group():
# Grab submission form data and prepare email message
data = request.json
msg = "Someone has request that you add {group_name} to the leaderboard \
groups. The groups website is {group_website} and the submitter can \
be reached at {submitter_email}.".format(
group_name=data['new_group_name'],
group_website=data['new_group_website'],
submitter_email=data['submitter_email'])
return Response(status=200)
'''
try:
email(
subject="SciNet: A new group has been requested",
fro="[email protected]",
to='[email protected]',
msg=msg)
return Response(status=200)
except:
return Response(status=500)
'''
@app.errorhandler(404)
def not_found(error):
return make_response(jsonify( { 'error': 'Page Not Found' } ), 404)
@app.errorhandler(405)
def method_not_allowed(error):
return make_response(jsonify( { 'error': 'Method Not Allowed' } ), 405)
|
from corecat.constants import OBJECT_CODES, MODEL_VERSION
from ._sqlalchemy import Base, CoreCatBaseMixin
from ._sqlalchemy import Column, \
Integer, \
String, Text
class Project(CoreCatBaseMixin, Base):
"""Project Model class represent for the 'projects' table
which is used to store project's basic information."""
# Add the real table name here.
# TODO: Add the database prefix here
__tablename__ = 'project'
# Column definition
project_id = Column('id', Integer,
primary_key=True,
autoincrement=True
)
project_name = Column('name', String(100),
nullable=False
)
project_description = Column('description', Text,
nullable=True
)
# Relationship
# TODO: Building relationship
def __init__(self, project_name,
created_by_user_id,
**kwargs):
"""
Constructor of Project Model Class.
:param project_name: Name of the project.
:param created_by_user_id: Project is created under this user ID.
:param project_description: Description of the project.
"""
self.set_up_basic_information(
MODEL_VERSION[OBJECT_CODES['Project']],
created_by_user_id
)
self.project_name = project_name
self.project_description = kwargs.get('project_description', None)
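# A construction sketch (illustrative only; the user id below is a placeholder):
#
#     project = Project('My project', created_by_user_id=1,
#                       project_description='An example project.')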
|
from ansible.module_utils.hashivault import hashivault_argspec
from ansible.module_utils.hashivault import hashivault_auth_client
from ansible.module_utils.hashivault import hashivault_init
from ansible.module_utils.hashivault import hashiwrapper
ANSIBLE_METADATA = {'status': ['stableinterface'], 'supported_by': 'community', 'version': '1.1'}
DOCUMENTATION = '''
---
module: hashivault_approle_role_get
version_added: "3.8.0"
short_description: Hashicorp Vault approle role get module
description:
    - Module to get an AppRole role from Hashicorp Vault.
options:
name:
description:
- role name.
mount_point:
description:
- mount point for role
default: approle
extends_documentation_fragment: hashivault
'''
EXAMPLES = '''
---
- hosts: localhost
tasks:
- hashivault_approle_role_get:
name: 'ashley'
register: 'vault_approle_role_get'
- debug: msg="Role is {{vault_approle_role_get.role}}"
'''
def main():
argspec = hashivault_argspec()
argspec['name'] = dict(required=True, type='str')
argspec['mount_point'] = dict(required=False, type='str', default='approle')
module = hashivault_init(argspec)
result = hashivault_approle_role_get(module.params)
if result.get('failed'):
module.fail_json(**result)
else:
module.exit_json(**result)
@hashiwrapper
def hashivault_approle_role_get(params):
name = params.get('name')
client = hashivault_auth_client(params)
result = client.get_role(name, mount_point=params.get('mount_point'))
return {'role': result}
if __name__ == '__main__':
main()
|
from scrapy.spiders import Spider
from scrapy.selector import Selector
from scrapy.http import HtmlResponse
from FIFAscrape.items import PlayerItem
from urlparse import urlparse, urljoin
from scrapy.http.request import Request
from scrapy.conf import settings
import random
import time
class fifaSpider(Spider):
name = "fifa"
allowed_domains = ["futhead.com"]
start_urls = [
"http://www.futhead.com/16/players/?level=all_nif&bin_platform=ps"
]
def parse(self, response):
#obtains links from page to page and passes links to parse_playerURL
sel = Selector(response) #define selector based on response object (points to urls in start_urls by default)
url_list = sel.xpath('//a[@class="display-block padding-0"]/@href') #obtain a list of href links that contain relative links of players
for i in url_list:
relative_url = self.clean_str(i.extract()) #i is a selector and hence need to extract it to obtain unicode object
print urljoin(response.url, relative_url) #urljoin is able to merge absolute and relative paths to form 1 coherent link
req = Request(urljoin(response.url, relative_url),callback=self.parse_playerURL) #pass on request with new urls to parse_playerURL
req.headers["User-Agent"] = self.random_ua()
yield req
next_url=sel.xpath('//div[@class="right-nav pull-right"]/a[@rel="next"]/@href').extract_first()
if(next_url): #checks if next page exists
clean_next_url = self.clean_str(next_url)
reqNext = Request(urljoin(response.url, clean_next_url),callback=self.parse) #calls back this function to repeat process on new list of links
yield reqNext
def parse_playerURL(self, response):
#parses player specific data into items list
site = Selector(response)
items = []
item = PlayerItem()
item['1name'] = (response.url).rsplit("/")[-2].replace("-"," ")
title = self.clean_str(site.xpath('/html/head/title/text()').extract_first())
item['OVR'] = title.partition("FIFA 16 -")[1].split("-")[0]
item['POS'] = self.clean_str(site.xpath('//div[@class="playercard-position"]/text()').extract_first())
#stats = site.xpath('//div[@class="row player-center-container"]/div/a')
stat_names = site.xpath('//span[@class="player-stat-title"]')
stat_values = site.xpath('//span[contains(@class, "player-stat-value")]')
for index in range(len(stat_names)):
attr_name = stat_names[index].xpath('.//text()').extract_first()
item[attr_name] = stat_values[index].xpath('.//text()').extract_first()
items.append(item)
return items
def clean_str(self,ustring):
        #removes weird unicode chars (e.g. \u0102), whitespace, tabs, etc. to form a clean string
return str(ustring.encode('ascii', 'replace')).strip()
def random_ua(self):
#randomise user-agent from list to reduce chance of being banned
ua = random.choice(settings.get('USER_AGENT_LIST'))
        if not ua:
            # fall back to a fixed user-agent if the list is empty or unset
            ua = 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36'
return ua
|
print("hello!!!!")
|
from copy import deepcopy
from typing import Any, Awaitable, Optional, TYPE_CHECKING
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer
from .. import models
from ._configuration import SqlVirtualMachineManagementClientConfiguration
from .operations import AvailabilityGroupListenersOperations, Operations, SqlVirtualMachineGroupsOperations, SqlVirtualMachinesOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class SqlVirtualMachineManagementClient:
"""The SQL virtual machine management API provides a RESTful set of web APIs that interact with Azure Compute, Network & Storage services to manage your SQL Server virtual machine. The API enables users to create, delete and retrieve a SQL virtual machine, SQL virtual machine group or availability group listener.
:ivar availability_group_listeners: AvailabilityGroupListenersOperations operations
:vartype availability_group_listeners:
azure.mgmt.sqlvirtualmachine.aio.operations.AvailabilityGroupListenersOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.sqlvirtualmachine.aio.operations.Operations
:ivar sql_virtual_machine_groups: SqlVirtualMachineGroupsOperations operations
:vartype sql_virtual_machine_groups:
azure.mgmt.sqlvirtualmachine.aio.operations.SqlVirtualMachineGroupsOperations
:ivar sql_virtual_machines: SqlVirtualMachinesOperations operations
:vartype sql_virtual_machines:
azure.mgmt.sqlvirtualmachine.aio.operations.SqlVirtualMachinesOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: Subscription ID that identifies an Azure subscription.
:type subscription_id: str
:param base_url: Service URL. Default value is 'https://management.azure.com'.
:type base_url: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = SqlVirtualMachineManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.availability_group_listeners = AvailabilityGroupListenersOperations(self._client, self._config, self._serialize, self._deserialize)
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
self.sql_virtual_machine_groups = SqlVirtualMachineGroupsOperations(self._client, self._config, self._serialize, self._deserialize)
self.sql_virtual_machines = SqlVirtualMachinesOperations(self._client, self._config, self._serialize, self._deserialize)
def _send_request(
self,
request: HttpRequest,
**kwargs: Any
) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = await client._send_request(request)
<AsyncHttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "SqlVirtualMachineManagementClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details) -> None:
await self._client.__aexit__(*exc_details)
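# A minimal construction sketch (illustrative; assumes the azure-identity
# package is installed and the subscription id is a placeholder):
#
#     from azure.identity.aio import DefaultAzureCredential
#     client = SqlVirtualMachineManagementClient(
#         credential=DefaultAzureCredential(),
#         subscription_id="<subscription-id>",
#     )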
|
from flask import Blueprint, request, render_template
from ..load import processing_results
from ..abbr import get_abbr_map
abbr_map = get_abbr_map()
liner_mod = Blueprint('liner', __name__, template_folder='templates', static_folder='static')
@liner_mod.route('/liner', methods=['GET', 'POST'])
def liner():
if request.method == 'POST':
query = request.form['liner-text']
text = query.split('.')[:-1]
if len(text) == 0:
return render_template('projects/line.html', message='Please separate each line with "."')
abbr_expanded_text = ""
for word in query.split():
if word in abbr_map:
abbr_expanded_text += abbr_map[word]
else:
abbr_expanded_text += word
abbr_expanded_text += " "
data, emotion_sents, score, line_sentiment, text, length = processing_results(text)
return render_template('projects/line.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length, abbr_expanded_text])
else:
return render_template('projects/line.html')
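# Example form submission (hypothetical host; 'liner-text' is the form field
# the view reads above, and each line must end with "."):
#
#     curl -X POST http://localhost:5000/liner -d 'liner-text=I am happy. I am sad.'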
|
from msrest.serialization import Model
class Dimension(Model):
"""Dimension of a resource metric. For e.g. instance specific HTTP requests
for a web app,
where instance name is dimension of the metric HTTP request.
:param name:
:type name: str
:param display_name:
:type display_name: str
:param internal_name:
:type internal_name: str
:param to_be_exported_for_shoebox:
:type to_be_exported_for_shoebox: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'internal_name': {'key': 'internalName', 'type': 'str'},
'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'},
}
def __init__(self, name=None, display_name=None, internal_name=None, to_be_exported_for_shoebox=None):
super(Dimension, self).__init__()
self.name = name
self.display_name = display_name
self.internal_name = internal_name
self.to_be_exported_for_shoebox = to_be_exported_for_shoebox
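# Serialization note (illustrative): the _attribute_map above controls the
# wire format, so for a Dimension(name='Instance', display_name='Instance')
# the 'display_name' attribute serializes to the JSON key 'displayName'.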
|
import asyncio
import discord
import datetime
import pytz
from discord.ext import commands
from Cogs import FuzzySearch
from Cogs import Settings
from Cogs import DisplayName
from Cogs import Message
from Cogs import Nullify
class Time:
# Init with the bot reference, and a reference to the settings var
def __init__(self, bot, settings):
self.bot = bot
self.settings = settings
@commands.command(pass_context=True)
async def settz(self, ctx, *, tz : str = None):
"""Sets your TimeZone - Overrides your UTC offset - and accounts for DST."""
usage = 'Usage: `{}settz [Region/City]`\nYou can get a list of available TimeZones with `{}listtz`'.format(ctx.prefix, ctx.prefix)
if not tz:
self.settings.setGlobalUserStat(ctx.author, "TimeZone", None)
await ctx.channel.send("*{}*, your TimeZone has been removed!".format(DisplayName.name(ctx.author)))
return
# Let's get the timezone list
tz_list = FuzzySearch.search(tz, pytz.all_timezones, None, 3)
if not tz_list[0]['Ratio'] == 1:
# We didn't find a complete match
msg = "I couldn't find that TimeZone!\n\nMaybe you meant one of the following?\n```"
for tz in tz_list:
msg += tz['Item'] + "\n"
msg += '```'
await ctx.channel.send(msg)
return
# We got a time zone
self.settings.setGlobalUserStat(ctx.author, "TimeZone", tz_list[0]['Item'])
await ctx.channel.send("TimeZone set to *{}!*".format(tz_list[0]['Item']))
@commands.command(pass_context=True)
async def listtz(self, ctx, *, tz_search = None):
"""List all the supported TimeZones in PM."""
if not tz_search:
msg = "__Available TimeZones:__\n\n"
for tz in pytz.all_timezones:
msg += tz + "\n"
else:
tz_list = FuzzySearch.search(tz_search, pytz.all_timezones)
msg = "__Top 3 TimeZone Matches:__\n\n"
for tz in tz_list:
msg += tz['Item'] + "\n"
await Message.say(self.bot, msg, ctx.channel, ctx.author, 1)
@commands.command(pass_context=True)
async def tz(self, ctx, *, member = None):
"""See a member's TimeZone."""
# Check if we're suppressing @here and @everyone mentions
if self.settings.getServerStat(ctx.message.guild, "SuppressMentions").lower() == "yes":
suppress = True
else:
suppress = False
if member == None:
member = ctx.message.author
if type(member) == str:
# Try to get a user first
memberName = member
member = DisplayName.memberForName(memberName, ctx.message.guild)
if not member:
msg = 'Couldn\'t find user *{}*.'.format(memberName)
# Check for suppress
if suppress:
msg = Nullify.clean(msg)
await ctx.channel.send(msg)
return
# We got one
timezone = self.settings.getGlobalUserStat(member, "TimeZone")
if timezone == None:
msg = '*{}* hasn\'t set their TimeZone yet - they can do so with the `{}settz [Region/City]` command.'.format(DisplayName.name(member), ctx.prefix)
await ctx.channel.send(msg)
return
msg = '*{}\'s* TimeZone is *{}*'.format(DisplayName.name(member), timezone)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def setoffset(self, ctx, *, offset : str = None):
"""Set your UTC offset."""
if offset == None:
self.settings.setGlobalUserStat(ctx.message.author, "UTCOffset", None)
msg = '*{}*, your UTC offset has been removed!'.format(DisplayName.name(ctx.message.author))
await ctx.channel.send(msg)
return
offset = offset.replace('+', '')
# Split time string by : and get hour/minute values
try:
hours, minutes = map(int, offset.split(':'))
except Exception:
try:
hours = int(offset)
minutes = 0
except Exception:
await ctx.channel.send('Offset has to be in +-H:M!')
return
off = "{}:{}".format(hours, minutes)
self.settings.setGlobalUserStat(ctx.message.author, "UTCOffset", off)
msg = '*{}*, your UTC offset has been set to *{}!*'.format(DisplayName.name(ctx.message.author), off)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def offset(self, ctx, *, member = None):
"""See a member's UTC offset."""
# Check if we're suppressing @here and @everyone mentions
if self.settings.getServerStat(ctx.message.guild, "SuppressMentions").lower() == "yes":
suppress = True
else:
suppress = False
if member == None:
member = ctx.message.author
if type(member) == str:
# Try to get a user first
memberName = member
member = DisplayName.memberForName(memberName, ctx.message.guild)
if not member:
msg = 'Couldn\'t find user *{}*.'.format(memberName)
# Check for suppress
if suppress:
msg = Nullify.clean(msg)
await ctx.channel.send(msg)
return
# We got one
offset = self.settings.getGlobalUserStat(member, "UTCOffset")
if offset == None:
msg = '*{}* hasn\'t set their offset yet - they can do so with the `{}setoffset [+-offset]` command.'.format(DisplayName.name(member), ctx.prefix)
await ctx.channel.send(msg)
return
# Split time string by : and get hour/minute values
try:
hours, minutes = map(int, offset.split(':'))
except Exception:
try:
hours = int(offset)
minutes = 0
except Exception:
await ctx.channel.send('Offset has to be in +-H:M!')
return
msg = 'UTC'
# Apply offset
if hours > 0:
# Apply positive offset
msg += '+{}'.format(offset)
elif hours < 0:
# Apply negative offset
msg += '{}'.format(offset)
msg = '*{}\'s* offset is *{}*'.format(DisplayName.name(member), msg)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def time(self, ctx, *, offset : str = None):
"""Get UTC time +- an offset."""
timezone = None
if offset == None:
member = ctx.message.author
else:
# Try to get a user first
member = DisplayName.memberForName(offset, ctx.message.guild)
if member:
# We got one
# Check for timezone first
offset = self.settings.getGlobalUserStat(member, "TimeZone")
if offset == None:
offset = self.settings.getGlobalUserStat(member, "UTCOffset")
if offset == None:
msg = '*{}* hasn\'t set their TimeZone or offset yet - they can do so with the `{}setoffset [+-offset]` or `{}settz [Region/City]` command.\nThe current UTC time is *{}*.'.format(DisplayName.name(member), ctx.prefix, ctx.prefix, datetime.datetime.utcnow().strftime("%I:%M %p"))
await ctx.channel.send(msg)
return
# At this point - we need to determine if we have an offset - or possibly a timezone passed
t = self.getTimeFromTZ(offset)
if t == None:
# We did not get an offset
t = self.getTimeFromOffset(offset)
if t == None:
await ctx.channel.send("I couldn't find that TimeZone or offset!")
return
if member:
msg = '{}; where *{}* is, it\'s currently *{}*'.format(t["zone"], DisplayName.name(member), t["time"])
else:
msg = '{} is currently *{}*'.format(t["zone"], t["time"])
# Say message
await ctx.channel.send(msg)
def getTimeFromOffset(self, offset):
offset = offset.replace('+', '')
# Split time string by : and get hour/minute values
try:
hours, minutes = map(int, offset.split(':'))
except Exception:
try:
hours = int(offset)
minutes = 0
except Exception:
return None
# await ctx.channel.send('Offset has to be in +-H:M!')
# return
msg = 'UTC'
# Get the time
t = datetime.datetime.utcnow()
# Apply offset
if hours > 0:
# Apply positive offset
msg += '+{}'.format(offset)
td = datetime.timedelta(hours=hours, minutes=minutes)
newTime = t + td
elif hours < 0:
# Apply negative offset
msg += '{}'.format(offset)
td = datetime.timedelta(hours=(-1*hours), minutes=(-1*minutes))
newTime = t - td
else:
# No offset
newTime = t
return { "zone" : msg, "time" : newTime.strftime("%I:%M %p") }
def getTimeFromTZ(self, tz):
# Assume sanitized zones - as they're pulled from pytz
# Let's get the timezone list
tz_list = FuzzySearch.search(tz, pytz.all_timezones, None, 3)
if not tz_list[0]['Ratio'] == 1:
# We didn't find a complete match
return None
zone = pytz.timezone(tz_list[0]['Item'])
zone_now = datetime.datetime.now(zone)
return { "zone" : tz_list[0]['Item'], "time" : zone_now.strftime("%I:%M %p") }
|
import unittest
from katas.beta.what_color_is_your_name import string_color
class StringColorTestCase(unittest.TestCase):
def test_equal_1(self):
self.assertEqual(string_color('Jack'), '79CAE5')
def test_equal_2(self):
self.assertEqual(string_color('Joshua'), '6A10D6')
def test_equal_3(self):
self.assertEqual(string_color('Joshua Smith'), '8F00FB')
def test_equal_4(self):
self.assertEqual(string_color('Hayden Smith'), '7E00EE')
def test_equal_5(self):
self.assertEqual(string_color('Mathew Smith'), '8B00F1')
def test_is_none_1(self):
self.assertIsNone(string_color('a'))
|
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, \
config, is_
import re
from sqlalchemy.testing.util import picklers
from sqlalchemy.interfaces import ConnectionProxy
from sqlalchemy import MetaData, Integer, String, INT, VARCHAR, func, \
bindparam, select, event, TypeDecorator, create_engine, Sequence
from sqlalchemy.sql import column, literal
from sqlalchemy.testing.schema import Table, Column
import sqlalchemy as tsa
from sqlalchemy import testing
from sqlalchemy.testing import engines
from sqlalchemy import util
from sqlalchemy.testing.engines import testing_engine
import logging.handlers
from sqlalchemy.dialects.oracle.zxjdbc import ReturningParam
from sqlalchemy.engine import result as _result, default
from sqlalchemy.engine.base import Engine
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.mock import Mock, call, patch
from contextlib import contextmanager
users, metadata, users_autoinc = None, None, None
class ExecuteTest(fixtures.TestBase):
__backend__ = True
@classmethod
def setup_class(cls):
global users, users_autoinc, metadata
metadata = MetaData(testing.db)
users = Table('users', metadata,
Column('user_id', INT, primary_key=True, autoincrement=False),
Column('user_name', VARCHAR(20)),
)
users_autoinc = Table('users_autoinc', metadata,
Column('user_id', INT, primary_key=True,
test_needs_autoincrement=True),
Column('user_name', VARCHAR(20)),
)
metadata.create_all()
@engines.close_first
def teardown(self):
testing.db.execute(users.delete())
@classmethod
def teardown_class(cls):
metadata.drop_all()
@testing.fails_on("postgresql+pg8000",
"pg8000 still doesn't allow single % without params")
def test_no_params_option(self):
stmt = "SELECT '%'" + testing.db.dialect.statement_compiler(
testing.db.dialect, None).default_from()
conn = testing.db.connect()
result = conn.\
execution_options(no_parameters=True).\
scalar(stmt)
eq_(result, '%')
@testing.fails_on_everything_except('firebird',
'sqlite', '+pyodbc',
'+mxodbc', '+zxjdbc', 'mysql+oursql')
def test_raw_qmark(self):
def go(conn):
conn.execute('insert into users (user_id, user_name) '
'values (?, ?)', (1, 'jack'))
conn.execute('insert into users (user_id, user_name) '
'values (?, ?)', [2, 'fred'])
conn.execute('insert into users (user_id, user_name) '
'values (?, ?)', [3, 'ed'], [4, 'horse'])
conn.execute('insert into users (user_id, user_name) '
'values (?, ?)', (5, 'barney'), (6, 'donkey'))
conn.execute('insert into users (user_id, user_name) '
'values (?, ?)', 7, 'sally')
res = conn.execute('select * from users order by user_id')
assert res.fetchall() == [
(1, 'jack'),
(2, 'fred'),
(3, 'ed'),
(4, 'horse'),
(5, 'barney'),
(6, 'donkey'),
(7, 'sally'),
]
for multiparam, param in [
(("jack", "fred"), {}),
((["jack", "fred"],), {})
]:
res = conn.execute(
"select * from users where user_name=? or "
"user_name=? order by user_id",
*multiparam, **param)
assert res.fetchall() == [
(1, 'jack'),
(2, 'fred')
]
res = conn.execute("select * from users where user_name=?",
"jack"
)
assert res.fetchall() == [(1, 'jack')]
conn.execute('delete from users')
go(testing.db)
conn = testing.db.connect()
try:
go(conn)
finally:
conn.close()
# some psycopg2 versions bomb this.
@testing.fails_on_everything_except('mysql+mysqldb', 'mysql+pymysql',
'mysql+cymysql', 'mysql+mysqlconnector', 'postgresql')
@testing.fails_on('postgresql+zxjdbc', 'sprintf not supported')
def test_raw_sprintf(self):
def go(conn):
conn.execute('insert into users (user_id, user_name) '
'values (%s, %s)', [1, 'jack'])
conn.execute('insert into users (user_id, user_name) '
'values (%s, %s)', [2, 'ed'], [3, 'horse'])
conn.execute('insert into users (user_id, user_name) '
'values (%s, %s)', 4, 'sally')
conn.execute('insert into users (user_id) values (%s)', 5)
res = conn.execute('select * from users order by user_id')
assert res.fetchall() == [(1, 'jack'), (2, 'ed'), (3,
'horse'), (4, 'sally'), (5, None)]
for multiparam, param in [
(("jack", "ed"), {}),
((["jack", "ed"],), {})
]:
res = conn.execute(
"select * from users where user_name=%s or "
"user_name=%s order by user_id",
*multiparam, **param)
assert res.fetchall() == [
(1, 'jack'),
(2, 'ed')
]
res = conn.execute("select * from users where user_name=%s",
"jack"
)
assert res.fetchall() == [(1, 'jack')]
conn.execute('delete from users')
go(testing.db)
conn = testing.db.connect()
try:
go(conn)
finally:
conn.close()
# pyformat is supported for mysql, but skipping because a few driver
# versions have a bug that bombs out on this test. (1.2.2b3,
# 1.2.2c1, 1.2.2)
@testing.skip_if(lambda : testing.against('mysql+mysqldb'),
'db-api flaky')
@testing.fails_on_everything_except('postgresql+psycopg2',
'postgresql+pypostgresql', 'mysql+mysqlconnector',
'mysql+pymysql', 'mysql+cymysql')
def test_raw_python(self):
def go(conn):
conn.execute('insert into users (user_id, user_name) '
'values (%(id)s, %(name)s)', {'id': 1, 'name'
: 'jack'})
conn.execute('insert into users (user_id, user_name) '
'values (%(id)s, %(name)s)', {'id': 2, 'name'
: 'ed'}, {'id': 3, 'name': 'horse'})
conn.execute('insert into users (user_id, user_name) '
'values (%(id)s, %(name)s)', id=4, name='sally'
)
res = conn.execute('select * from users order by user_id')
assert res.fetchall() == [(1, 'jack'), (2, 'ed'), (3,
'horse'), (4, 'sally')]
conn.execute('delete from users')
go(testing.db)
conn = testing.db.connect()
try:
go(conn)
finally:
conn.close()
@testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle')
def test_raw_named(self):
def go(conn):
conn.execute('insert into users (user_id, user_name) '
'values (:id, :name)', {'id': 1, 'name': 'jack'
})
conn.execute('insert into users (user_id, user_name) '
'values (:id, :name)', {'id': 2, 'name': 'ed'
}, {'id': 3, 'name': 'horse'})
conn.execute('insert into users (user_id, user_name) '
'values (:id, :name)', id=4, name='sally')
res = conn.execute('select * from users order by user_id')
assert res.fetchall() == [(1, 'jack'), (2, 'ed'), (3,
'horse'), (4, 'sally')]
conn.execute('delete from users')
go(testing.db)
        conn = testing.db.connect()
try:
go(conn)
finally:
conn.close()
@testing.engines.close_open_connections
def test_exception_wrapping_dbapi(self):
conn = testing.db.connect()
for _c in testing.db, conn:
assert_raises_message(
tsa.exc.DBAPIError,
r"not_a_valid_statement",
_c.execute, 'not_a_valid_statement'
)
@testing.requires.sqlite
def test_exception_wrapping_non_dbapi_error(self):
e = create_engine('sqlite://')
e.dialect.is_disconnect = is_disconnect = Mock()
with e.connect() as c:
c.connection.cursor = Mock(
return_value=Mock(
execute=Mock(
side_effect=TypeError("I'm not a DBAPI error")
))
)
assert_raises_message(
TypeError,
"I'm not a DBAPI error",
c.execute, "select "
)
eq_(is_disconnect.call_count, 0)
def test_exception_wrapping_non_dbapi_statement(self):
class MyType(TypeDecorator):
impl = Integer
def process_bind_param(self, value, dialect):
raise Exception("nope")
def _go(conn):
assert_raises_message(
tsa.exc.StatementError,
r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
conn.execute,
select([1]).\
where(
column('foo') == literal('bar', MyType())
)
)
_go(testing.db)
conn = testing.db.connect()
try:
_go(conn)
finally:
conn.close()
def test_stmt_exception_non_ascii(self):
name = util.u('méil')
with testing.db.connect() as conn:
assert_raises_message(
tsa.exc.StatementError,
util.u(
"A value is required for bind parameter 'uname'"
r'.*SELECT users.user_name AS .m\\xe9il.') if util.py2k
else
util.u(
"A value is required for bind parameter 'uname'"
'.*SELECT users.user_name AS .méil.')
,
conn.execute,
select([users.c.user_name.label(name)]).where(
users.c.user_name == bindparam("uname")),
{'uname_incorrect': 'foo'}
)
def test_stmt_exception_pickleable_no_dbapi(self):
self._test_stmt_exception_pickleable(Exception("hello world"))
@testing.crashes("postgresql+psycopg2",
"Older versions don't support cursor pickling, newer ones do")
@testing.fails_on("mysql+oursql",
"Exception doesn't come back exactly the same from pickle")
@testing.fails_on("mysql+mysqlconnector",
"Exception doesn't come back exactly the same from pickle")
@testing.fails_on("oracle+cx_oracle",
"cx_oracle exception seems to be having "
"some issue with pickling")
def test_stmt_exception_pickleable_plus_dbapi(self):
raw = testing.db.raw_connection()
the_orig = None
try:
try:
cursor = raw.cursor()
cursor.execute("SELECTINCORRECT")
except testing.db.dialect.dbapi.DatabaseError as orig:
# py3k has "orig" in local scope...
the_orig = orig
finally:
raw.close()
self._test_stmt_exception_pickleable(the_orig)
def _test_stmt_exception_pickleable(self, orig):
for sa_exc in (
tsa.exc.StatementError("some error",
"select * from table",
{"foo":"bar"},
orig),
tsa.exc.InterfaceError("select * from table",
{"foo":"bar"},
orig),
tsa.exc.NoReferencedTableError("message", "tname"),
tsa.exc.NoReferencedColumnError("message", "tname", "cname"),
tsa.exc.CircularDependencyError("some message", [1, 2, 3], [(1, 2), (3, 4)]),
):
for loads, dumps in picklers():
repickled = loads(dumps(sa_exc))
eq_(repickled.args[0], sa_exc.args[0])
if isinstance(sa_exc, tsa.exc.StatementError):
eq_(repickled.params, {"foo":"bar"})
eq_(repickled.statement, sa_exc.statement)
if hasattr(sa_exc, "connection_invalidated"):
eq_(repickled.connection_invalidated,
sa_exc.connection_invalidated)
eq_(repickled.orig.args[0], orig.args[0])
def test_dont_wrap_mixin(self):
class MyException(Exception, tsa.exc.DontWrapMixin):
pass
class MyType(TypeDecorator):
impl = Integer
def process_bind_param(self, value, dialect):
raise MyException("nope")
def _go(conn):
assert_raises_message(
MyException,
"nope",
conn.execute,
select([1]).\
where(
column('foo') == literal('bar', MyType())
)
)
_go(testing.db)
conn = testing.db.connect()
try:
_go(conn)
finally:
conn.close()
def test_empty_insert(self):
"""test that execute() interprets [] as a list with no params"""
testing.db.execute(users_autoinc.insert().
values(user_name=bindparam('name', None)), [])
eq_(testing.db.execute(users_autoinc.select()).fetchall(), [(1, None)])
@testing.requires.ad_hoc_engines
def test_engine_level_options(self):
eng = engines.testing_engine(options={'execution_options':
{'foo': 'bar'}})
with eng.contextual_connect() as conn:
eq_(conn._execution_options['foo'], 'bar')
eq_(conn.execution_options(bat='hoho')._execution_options['foo'
], 'bar')
eq_(conn.execution_options(bat='hoho')._execution_options['bat'
], 'hoho')
eq_(conn.execution_options(foo='hoho')._execution_options['foo'
], 'hoho')
eng.update_execution_options(foo='hoho')
conn = eng.contextual_connect()
eq_(conn._execution_options['foo'], 'hoho')
@testing.requires.ad_hoc_engines
def test_generative_engine_execution_options(self):
eng = engines.testing_engine(options={'execution_options':
{'base': 'x1'}})
eng1 = eng.execution_options(foo="b1")
eng2 = eng.execution_options(foo="b2")
eng1a = eng1.execution_options(bar="a1")
eng2a = eng2.execution_options(foo="b3", bar="a2")
eq_(eng._execution_options,
{'base': 'x1'})
eq_(eng1._execution_options,
{'base': 'x1', 'foo': 'b1'})
eq_(eng2._execution_options,
{'base': 'x1', 'foo': 'b2'})
eq_(eng1a._execution_options,
{'base': 'x1', 'foo': 'b1', 'bar': 'a1'})
eq_(eng2a._execution_options,
{'base': 'x1', 'foo': 'b3', 'bar': 'a2'})
is_(eng1a.pool, eng.pool)
# test pool is shared
eng2.dispose()
is_(eng1a.pool, eng2.pool)
is_(eng.pool, eng2.pool)
@testing.requires.ad_hoc_engines
def test_generative_engine_event_dispatch(self):
canary = []
def l1(*arg, **kw):
canary.append("l1")
def l2(*arg, **kw):
canary.append("l2")
def l3(*arg, **kw):
canary.append("l3")
eng = engines.testing_engine(options={'execution_options':
{'base': 'x1'}})
event.listen(eng, "before_execute", l1)
eng1 = eng.execution_options(foo="b1")
event.listen(eng, "before_execute", l2)
event.listen(eng1, "before_execute", l3)
eng.execute(select([1])).close()
eng1.execute(select([1])).close()
eq_(canary, ["l1", "l2", "l3", "l1", "l2"])
@testing.requires.ad_hoc_engines
def test_generative_engine_event_dispatch_hasevents(self):
def l1(*arg, **kw):
pass
eng = create_engine(testing.db.url)
assert not eng._has_events
event.listen(eng, "before_execute", l1)
eng2 = eng.execution_options(foo='bar')
assert eng2._has_events
def test_unicode_test_fails_warning(self):
class MockCursor(engines.DBAPIProxyCursor):
def execute(self, stmt, params=None, **kw):
if "test unicode returns" in stmt:
raise self.engine.dialect.dbapi.DatabaseError("boom")
else:
return super(MockCursor, self).execute(stmt, params, **kw)
eng = engines.proxying_engine(cursor_cls=MockCursor)
assert_raises_message(
tsa.exc.SAWarning,
"Exception attempting to detect unicode returns",
eng.connect
)
assert eng.dialect.returns_unicode_strings in (True, False)
eng.dispose()
def test_works_after_dispose(self):
eng = create_engine(testing.db.url)
for i in range(3):
eq_(eng.scalar(select([1])), 1)
eng.dispose()
def test_works_after_dispose_testing_engine(self):
eng = engines.testing_engine()
for i in range(3):
eq_(eng.scalar(select([1])), 1)
eng.dispose()
class ConvenienceExecuteTest(fixtures.TablesTest):
__backend__ = True
@classmethod
def define_tables(cls, metadata):
cls.table = Table('exec_test', metadata,
Column('a', Integer),
Column('b', Integer),
test_needs_acid=True
)
def _trans_fn(self, is_transaction=False):
def go(conn, x, value=None):
if is_transaction:
conn = conn.connection
conn.execute(self.table.insert().values(a=x, b=value))
return go
def _trans_rollback_fn(self, is_transaction=False):
def go(conn, x, value=None):
if is_transaction:
conn = conn.connection
conn.execute(self.table.insert().values(a=x, b=value))
raise Exception("breakage")
return go
def _assert_no_data(self):
eq_(
testing.db.scalar(self.table.count()), 0
)
def _assert_fn(self, x, value=None):
eq_(
testing.db.execute(self.table.select()).fetchall(),
[(x, value)]
)
def test_transaction_engine_ctx_commit(self):
fn = self._trans_fn()
ctx = testing.db.begin()
testing.run_as_contextmanager(ctx, fn, 5, value=8)
self._assert_fn(5, value=8)
def test_transaction_engine_ctx_begin_fails(self):
engine = engines.testing_engine()
mock_connection = Mock(
return_value=Mock(
begin=Mock(side_effect=Exception("boom"))
)
)
engine._connection_cls = mock_connection
assert_raises(
Exception,
engine.begin
)
eq_(
mock_connection.return_value.close.mock_calls,
[call()]
)
def test_transaction_engine_ctx_rollback(self):
fn = self._trans_rollback_fn()
ctx = testing.db.begin()
assert_raises_message(
Exception,
"breakage",
testing.run_as_contextmanager, ctx, fn, 5, value=8
)
self._assert_no_data()
def test_transaction_tlocal_engine_ctx_commit(self):
fn = self._trans_fn()
engine = engines.testing_engine(options=dict(
strategy='threadlocal',
pool=testing.db.pool))
ctx = engine.begin()
testing.run_as_contextmanager(ctx, fn, 5, value=8)
self._assert_fn(5, value=8)
def test_transaction_tlocal_engine_ctx_rollback(self):
fn = self._trans_rollback_fn()
engine = engines.testing_engine(options=dict(
strategy='threadlocal',
pool=testing.db.pool))
ctx = engine.begin()
assert_raises_message(
Exception,
"breakage",
testing.run_as_contextmanager, ctx, fn, 5, value=8
)
self._assert_no_data()
def test_transaction_connection_ctx_commit(self):
fn = self._trans_fn(True)
conn = testing.db.connect()
ctx = conn.begin()
testing.run_as_contextmanager(ctx, fn, 5, value=8)
self._assert_fn(5, value=8)
def test_transaction_connection_ctx_rollback(self):
fn = self._trans_rollback_fn(True)
conn = testing.db.connect()
ctx = conn.begin()
assert_raises_message(
Exception,
"breakage",
testing.run_as_contextmanager, ctx, fn, 5, value=8
)
self._assert_no_data()
def test_connection_as_ctx(self):
fn = self._trans_fn()
ctx = testing.db.connect()
testing.run_as_contextmanager(ctx, fn, 5, value=8)
# autocommit is on
self._assert_fn(5, value=8)
@testing.fails_on('mysql+oursql', "oursql bug? getting wrong rowcount")
def test_connect_as_ctx_noautocommit(self):
fn = self._trans_fn()
self._assert_no_data()
ctx = testing.db.connect().execution_options(autocommit=False)
testing.run_as_contextmanager(ctx, fn, 5, value=8)
# autocommit is off
self._assert_no_data()
def test_transaction_engine_fn_commit(self):
fn = self._trans_fn()
testing.db.transaction(fn, 5, value=8)
self._assert_fn(5, value=8)
def test_transaction_engine_fn_rollback(self):
fn = self._trans_rollback_fn()
assert_raises_message(
Exception,
"breakage",
testing.db.transaction, fn, 5, value=8
)
self._assert_no_data()
def test_transaction_connection_fn_commit(self):
fn = self._trans_fn()
conn = testing.db.connect()
conn.transaction(fn, 5, value=8)
self._assert_fn(5, value=8)
def test_transaction_connection_fn_rollback(self):
fn = self._trans_rollback_fn()
conn = testing.db.connect()
assert_raises(
Exception,
conn.transaction, fn, 5, value=8
)
self._assert_no_data()
class CompiledCacheTest(fixtures.TestBase):
__backend__ = True
@classmethod
def setup_class(cls):
global users, metadata
metadata = MetaData(testing.db)
users = Table('users', metadata,
Column('user_id', INT, primary_key=True,
test_needs_autoincrement=True),
Column('user_name', VARCHAR(20)),
)
metadata.create_all()
@engines.close_first
def teardown(self):
testing.db.execute(users.delete())
@classmethod
def teardown_class(cls):
metadata.drop_all()
def test_cache(self):
conn = testing.db.connect()
cache = {}
cached_conn = conn.execution_options(compiled_cache=cache)
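# three executions of the same insert() construct with identical
# parameter keys should collapse to a single compiled-cache entry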
ins = users.insert()
cached_conn.execute(ins, {'user_name':'u1'})
cached_conn.execute(ins, {'user_name':'u2'})
cached_conn.execute(ins, {'user_name':'u3'})
assert len(cache) == 1
eq_(conn.execute("select count(*) from users").scalar(), 3)
class MockStrategyTest(fixtures.TestBase):
def _engine_fixture(self):
buf = util.StringIO()
def dump(sql, *multiparams, **params):
buf.write(util.text_type(sql.compile(dialect=engine.dialect)))
engine = create_engine('postgresql://', strategy='mock', executor=dump)
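# the 'mock' strategy returns an Engine that never opens a real DBAPI
# connection; each statement is handed to the executor callable instead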
return engine, buf
def test_sequence_not_duped(self):
engine, buf = self._engine_fixture()
metadata = MetaData()
t = Table('testtable', metadata,
Column('pk', Integer, Sequence('testtable_pk_seq'), primary_key=True)
)
t.create(engine)
t.drop(engine)
eq_(
re.findall(r'CREATE (\w+)', buf.getvalue()),
["SEQUENCE", "TABLE"]
)
eq_(
re.findall(r'DROP (\w+)', buf.getvalue()),
["SEQUENCE", "TABLE"]
)
class ResultProxyTest(fixtures.TestBase):
__backend__ = True
def test_nontuple_row(self):
"""ensure the C version of BaseRowProxy handles
duck-type-dependent rows."""
from sqlalchemy.engine import RowProxy
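# RowProxy takes (parent, row, processors, keymap); keymap maps both
# string and integer keys to (processor, obj, index) triples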
class MyList(object):
def __init__(self, l):
self.l = l
def __len__(self):
return len(self.l)
def __getitem__(self, i):
return list.__getitem__(self.l, i)
proxy = RowProxy(
    object(), MyList(['value']), [None],
    {'key': (None, None, 0), 0: (None, None, 0)})
eq_(list(proxy), ['value'])
eq_(proxy[0], 'value')
eq_(proxy['key'], 'value')
@testing.provide_metadata
def test_no_rowcount_on_selects_inserts(self):
"""assert that rowcount is only called on deletes and updates.
This because cursor.rowcount may can be expensive on some dialects
such as Firebird, however many dialects require it be called
before the cursor is closed.
"""
metadata = self.metadata
engine = engines.testing_engine()
t = Table('t1', metadata,
Column('data', String(10))
)
metadata.create_all(engine)
with patch.object(engine.dialect.execution_ctx_cls, "rowcount") as mock_rowcount:
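# giving the patched attribute a __get__ turns it into a descriptor,
# so every attribute access shows up as a recorded mock call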
mock_rowcount.__get__ = Mock()
engine.execute(t.insert(),
{'data': 'd1'},
{'data': 'd2'},
{'data': 'd3'})
eq_(len(mock_rowcount.__get__.mock_calls), 0)
eq_(
engine.execute(t.select()).fetchall(),
[('d1', ), ('d2', ), ('d3', )]
)
eq_(len(mock_rowcount.__get__.mock_calls), 0)
engine.execute(t.update(), {'data': 'd4'})
eq_(len(mock_rowcount.__get__.mock_calls), 1)
engine.execute(t.delete())
eq_(len(mock_rowcount.__get__.mock_calls), 2)
def test_rowproxy_is_sequence(self):
import collections
from sqlalchemy.engine import RowProxy
row = RowProxy(
    object(), ['value'], [None],
    {'key': (None, None, 0), 0: (None, None, 0)})
assert isinstance(row, collections.Sequence)
@testing.requires.cextensions
def test_row_c_sequence_check(self):
import csv
import collections
metadata = MetaData()
metadata.bind = 'sqlite://'
users = Table('users', metadata,
Column('id', Integer, primary_key=True),
Column('name', String(40)),
)
users.create()
users.insert().execute(name='Test')
row = users.select().execute().fetchone()
s = util.StringIO()
writer = csv.writer(s)
# csv performs PySequenceCheck call
writer.writerow(row)
assert s.getvalue().strip() == '1,Test'
@testing.requires.selectone
def test_empty_accessors(self):
statements = [
(
"select 1",
[
lambda r: r.last_inserted_params(),
lambda r: r.last_updated_params(),
lambda r: r.prefetch_cols(),
lambda r: r.postfetch_cols(),
lambda r: r.inserted_primary_key
],
"Statement is not a compiled expression construct."
),
(
select([1]),
[
lambda r: r.last_inserted_params(),
lambda r: r.inserted_primary_key
],
r"Statement is not an insert\(\) expression construct."
),
(
select([1]),
[
lambda r: r.last_updated_params(),
],
r"Statement is not an update\(\) expression construct."
),
(
select([1]),
[
lambda r: r.prefetch_cols(),
lambda r: r.postfetch_cols()
],
r"Statement is not an insert\(\) "
r"or update\(\) expression construct."
),
]
for stmt, meths, msg in statements:
r = testing.db.execute(stmt)
try:
for meth in meths:
assert_raises_message(
tsa.exc.InvalidRequestError,
msg,
meth, r
)
finally:
r.close()
class ExecutionOptionsTest(fixtures.TestBase):
def test_dialect_conn_options(self):
engine = testing_engine("sqlite://", options=dict(_initialize=False))
engine.dialect = Mock()
conn = engine.connect()
c2 = conn.execution_options(foo="bar")
eq_(
engine.dialect.set_connection_execution_options.mock_calls,
[call(c2, {"foo": "bar"})]
)
def test_dialect_engine_options(self):
engine = testing_engine("sqlite://")
engine.dialect = Mock()
e2 = engine.execution_options(foo="bar")
eq_(
engine.dialect.set_engine_execution_options.mock_calls,
[call(e2, {"foo": "bar"})]
)
def test_dialect_engine_construction_options(self):
dialect = Mock()
engine = Engine(Mock(), dialect, Mock(),
execution_options={"foo": "bar"})
eq_(
dialect.set_engine_execution_options.mock_calls,
[call(engine, {"foo": "bar"})]
)
def test_propagate_engine_to_connection(self):
engine = testing_engine("sqlite://",
options=dict(execution_options={"foo": "bar"}))
conn = engine.connect()
eq_(conn._execution_options, {"foo": "bar"})
def test_propagate_option_engine_to_connection(self):
e1 = testing_engine("sqlite://",
options=dict(execution_options={"foo": "bar"}))
e2 = e1.execution_options(bat="hoho")
c1 = e1.connect()
c2 = e2.connect()
eq_(c1._execution_options, {"foo": "bar"})
eq_(c2._execution_options, {"foo": "bar", "bat": "hoho"})
class AlternateResultProxyTest(fixtures.TestBase):
__requires__ = ('sqlite', )
@classmethod
def setup_class(cls):
from sqlalchemy.engine import base, default
cls.engine = engine = testing_engine('sqlite://')
m = MetaData()
cls.table = t = Table('test', m,
Column('x', Integer, primary_key=True),
Column('y', String(50, convert_unicode='force'))
)
m.create_all(engine)
engine.execute(t.insert(), [
{'x':i, 'y':"t_%d" % i} for i in range(1, 12)
])
def _test_proxy(self, cls):
    # bind `default` locally; the import in setup_class above is scoped
    # to that method and is not otherwise visible from this helper
    from sqlalchemy.engine import default
    class ExcCtx(default.DefaultExecutionContext):
def get_result_proxy(self):
return cls(self)
self.engine.dialect.execution_ctx_cls = ExcCtx
rows = []
r = self.engine.execute(select([self.table]))
assert isinstance(r, cls)
for i in range(5):
rows.append(r.fetchone())
eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)])
rows = r.fetchmany(3)
eq_(rows, [(i, "t_%d" % i) for i in range(6, 9)])
rows = r.fetchall()
eq_(rows, [(i, "t_%d" % i) for i in range(9, 12)])
r = self.engine.execute(select([self.table]))
rows = r.fetchmany(None)
eq_(rows[0], (1, "t_1"))
# number of rows here could be one, or the whole thing
assert len(rows) == 1 or len(rows) == 11
r = self.engine.execute(select([self.table]).limit(1))
r.fetchone()
eq_(r.fetchone(), None)
r = self.engine.execute(select([self.table]).limit(5))
rows = r.fetchmany(6)
eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)])
def test_plain(self):
self._test_proxy(_result.ResultProxy)
def test_buffered_row_result_proxy(self):
self._test_proxy(_result.BufferedRowResultProxy)
def test_fully_buffered_result_proxy(self):
self._test_proxy(_result.FullyBufferedResultProxy)
def test_buffered_column_result_proxy(self):
self._test_proxy(_result.BufferedColumnResultProxy)
class EngineEventsTest(fixtures.TestBase):
__requires__ = 'ad_hoc_engines',
__backend__ = True
def tearDown(self):
Engine.dispatch._clear()
Engine._has_events = False
def _assert_stmts(self, expected, received):
orig = list(received)
for stmt, params, posn in expected:
if not received:
assert False, "Nothing available for stmt: %s" % stmt
while received:
teststmt, testparams, testmultiparams = received.pop(0)
teststmt = re.compile(r'[\n\t ]+', re.M).sub(' ', teststmt).strip()
if teststmt.startswith(stmt) and \
        (testparams == params or testparams == posn):
    break
def test_per_engine_independence(self):
e1 = testing_engine(config.db_url)
e2 = testing_engine(config.db_url)
canary = Mock()
event.listen(e1, "before_execute", canary)
s1 = select([1])
s2 = select([2])
e1.execute(s1)
e2.execute(s2)
eq_(
[arg[1][1] for arg in canary.mock_calls], [s1]
)
event.listen(e2, "before_execute", canary)
e1.execute(s1)
e2.execute(s2)
eq_([arg[1][1] for arg in canary.mock_calls], [s1, s1, s2])
def test_per_engine_plus_global(self):
canary = Mock()
event.listen(Engine, "before_execute", canary.be1)
e1 = testing_engine(config.db_url)
e2 = testing_engine(config.db_url)
event.listen(e1, "before_execute", canary.be2)
event.listen(Engine, "before_execute", canary.be3)
e1.connect()
e2.connect()
e1.execute(select([1]))
eq_(canary.be1.call_count, 1)
eq_(canary.be2.call_count, 1)
e2.execute(select([1]))
eq_(canary.be1.call_count, 2)
eq_(canary.be2.call_count, 1)
eq_(canary.be3.call_count, 2)
def test_per_connection_plus_engine(self):
canary = Mock()
e1 = testing_engine(config.db_url)
event.listen(e1, "before_execute", canary.be1)
conn = e1.connect()
event.listen(conn, "before_execute", canary.be2)
conn.execute(select([1]))
eq_(canary.be1.call_count, 1)
eq_(canary.be2.call_count, 1)
conn._branch().execute(select([1]))
eq_(canary.be1.call_count, 2)
eq_(canary.be2.call_count, 2)
def test_add_event_after_connect(self):
# new feature as of #2978
canary = Mock()
e1 = create_engine(config.db_url)
assert not e1._has_events
conn = e1.connect()
event.listen(e1, "before_execute", canary.be1)
conn.execute(select([1]))
eq_(canary.be1.call_count, 1)
conn._branch().execute(select([1]))
eq_(canary.be1.call_count, 2)
def test_force_conn_events_false(self):
canary = Mock()
e1 = create_engine(config.db_url)
assert not e1._has_events
event.listen(e1, "before_execute", canary.be1)
conn = e1._connection_cls(e1, connection=e1.raw_connection(),
_has_events=False)
conn.execute(select([1]))
eq_(canary.be1.call_count, 0)
conn._branch().execute(select([1]))
eq_(canary.be1.call_count, 0)
def test_cursor_events_ctx_execute_scalar(self):
canary = Mock()
e1 = testing_engine(config.db_url)
event.listen(e1, "before_cursor_execute", canary.bce)
event.listen(e1, "after_cursor_execute", canary.ace)
stmt = str(select([1]).compile(dialect=e1.dialect))
with e1.connect() as conn:
dialect = conn.dialect
ctx = dialect.execution_ctx_cls._init_statement(
dialect, conn, conn.connection, stmt, {})
ctx._execute_scalar(stmt, Integer())
eq_(canary.bce.mock_calls,
[call(conn, ctx.cursor, stmt, ctx.parameters[0], ctx, False)])
eq_(canary.ace.mock_calls,
[call(conn, ctx.cursor, stmt, ctx.parameters[0], ctx, False)])
def test_cursor_events_execute(self):
canary = Mock()
e1 = testing_engine(config.db_url)
event.listen(e1, "before_cursor_execute", canary.bce)
event.listen(e1, "after_cursor_execute", canary.ace)
stmt = str(select([1]).compile(dialect=e1.dialect))
with e1.connect() as conn:
result = conn.execute(stmt)
ctx = result.context
eq_(canary.bce.mock_calls,
[call(conn, ctx.cursor, stmt, ctx.parameters[0], ctx, False)])
eq_(canary.ace.mock_calls,
[call(conn, ctx.cursor, stmt, ctx.parameters[0], ctx, False)])
def test_argument_format_execute(self):
def before_execute(conn, clauseelement, multiparams, params):
assert isinstance(multiparams, (list, tuple))
assert isinstance(params, dict)
def after_execute(conn, clauseelement, multiparams, params, result):
assert isinstance(multiparams, (list, tuple))
assert isinstance(params, dict)
e1 = testing_engine(config.db_url)
event.listen(e1, 'before_execute', before_execute)
event.listen(e1, 'after_execute', after_execute)
e1.execute(select([1]))
e1.execute(select([1]).compile(dialect=e1.dialect).statement)
e1.execute(select([1]).compile(dialect=e1.dialect))
e1._execute_compiled(select([1]).compile(dialect=e1.dialect), (), {})
@testing.fails_on('firebird', 'Data type unknown')
def test_execute_events(self):
stmts = []
cursor_stmts = []
def execute(conn, clauseelement, multiparams, params):
stmts.append((str(clauseelement), params, multiparams))
def cursor_execute(conn, cursor, statement, parameters,
context, executemany):
cursor_stmts.append((str(statement), parameters, None))
for engine in [
engines.testing_engine(options=dict(implicit_returning=False)),
engines.testing_engine(options=dict(implicit_returning=False,
strategy='threadlocal')),
engines.testing_engine(options=dict(implicit_returning=False)).\
connect()
]:
event.listen(engine, 'before_execute', execute)
event.listen(engine, 'before_cursor_execute', cursor_execute)
m = MetaData(engine)
t1 = Table('t1', m,
Column('c1', Integer, primary_key=True),
Column('c2', String(50), default=func.lower('Foo'),
primary_key=True)
)
m.create_all()
try:
t1.insert().execute(c1=5, c2='some data')
t1.insert().execute(c1=6)
eq_(engine.execute('select * from t1').fetchall(),
    [(5, 'some data'), (6, 'foo')])
finally:
m.drop_all()
compiled = [('CREATE TABLE t1', {}, None),
('INSERT INTO t1 (c1, c2)',
{'c2': 'some data', 'c1': 5}, None),
('INSERT INTO t1 (c1, c2)',
{'c1': 6}, None),
('select * from t1', {}, None),
('DROP TABLE t1', {}, None)]
# or engine.dialect.preexecute_pk_sequences:
if not testing.against('oracle+zxjdbc'):
cursor = [
('CREATE TABLE t1', {}, ()),
('INSERT INTO t1 (c1, c2)', {
'c2': 'some data', 'c1': 5},
(5, 'some data')),
('SELECT lower', {'lower_2': 'Foo'},
('Foo', )),
('INSERT INTO t1 (c1, c2)',
{'c2': 'foo', 'c1': 6},
(6, 'foo')),
('select * from t1', {}, ()),
('DROP TABLE t1', {}, ()),
]
else:
insert2_params = 6, 'Foo'
if testing.against('oracle+zxjdbc'):
insert2_params += (ReturningParam(12), )
cursor = [('CREATE TABLE t1', {}, ()),
('INSERT INTO t1 (c1, c2)',
{'c2': 'some data', 'c1': 5}, (5, 'some data')),
('INSERT INTO t1 (c1, c2)', {'c1': 6,
'lower_2': 'Foo'}, insert2_params),
('select * from t1', {}, ()),
('DROP TABLE t1', {}, ())]
# bind param name 'lower_2' might
# be incorrect
self._assert_stmts(compiled, stmts)
self._assert_stmts(cursor, cursor_stmts)
def test_options(self):
canary = []
def execute(conn, *args, **kw):
canary.append('execute')
def cursor_execute(conn, *args, **kw):
canary.append('cursor_execute')
engine = engines.testing_engine()
event.listen(engine, 'before_execute', execute)
event.listen(engine, 'before_cursor_execute', cursor_execute)
conn = engine.connect()
c2 = conn.execution_options(foo='bar')
eq_(c2._execution_options, {'foo':'bar'})
c2.execute(select([1]))
c3 = c2.execution_options(bar='bat')
eq_(c3._execution_options, {'foo':'bar', 'bar':'bat'})
eq_(canary, ['execute', 'cursor_execute'])
def test_retval_flag(self):
canary = []
def tracker(name):
def go(conn, *args, **kw):
canary.append(name)
return go
def execute(conn, clauseelement, multiparams, params):
canary.append('execute')
return clauseelement, multiparams, params
def cursor_execute(conn, cursor, statement,
parameters, context, executemany):
canary.append('cursor_execute')
return statement, parameters
engine = engines.testing_engine()
assert_raises(
tsa.exc.ArgumentError,
event.listen, engine, "begin", tracker("begin"), retval=True
)
event.listen(engine, "before_execute", execute, retval=True)
event.listen(engine, "before_cursor_execute", cursor_execute, retval=True)
engine.execute(select([1]))
eq_(
canary, ['execute', 'cursor_execute']
)
def test_engine_connect(self):
engine = engines.testing_engine()
tracker = Mock()
event.listen(engine, "engine_connect", tracker)
c1 = engine.connect()
c2 = c1._branch()
c1.close()
eq_(
tracker.mock_calls,
[call(c1, False), call(c2, True)]
)
def test_execution_options(self):
engine = engines.testing_engine()
engine_tracker = Mock()
conn_tracker = Mock()
event.listen(engine, "set_engine_execution_options", engine_tracker)
event.listen(engine, "set_connection_execution_options", conn_tracker)
e2 = engine.execution_options(e1='opt_e1')
c1 = engine.connect()
c2 = c1.execution_options(c1='opt_c1')
c3 = e2.connect()
c4 = c3.execution_options(c3='opt_c3')
eq_(
engine_tracker.mock_calls,
[call(e2, {'e1': 'opt_e1'})]
)
eq_(
conn_tracker.mock_calls,
[call(c2, {"c1": "opt_c1"}), call(c4, {"c3": "opt_c3"})]
)
@testing.requires.sequences
@testing.provide_metadata
def test_cursor_execute(self):
canary = []
def tracker(name):
def go(conn, cursor, statement, parameters, context, executemany):
canary.append((statement, context))
return go
engine = engines.testing_engine()
t = Table('t', self.metadata,
Column('x', Integer, Sequence('t_id_seq'), primary_key=True),
implicit_returning=False
)
self.metadata.create_all(engine)
with engine.begin() as conn:
event.listen(conn, 'before_cursor_execute', tracker('cursor_execute'))
conn.execute(t.insert())
# we see the sequence pre-executed in the first call
assert "t_id_seq" in canary[0][0]
assert "INSERT" in canary[1][0]
# same context
is_(
canary[0][1], canary[1][1]
)
def test_transactional(self):
canary = []
def tracker(name):
def go(conn, *args, **kw):
canary.append(name)
return go
engine = engines.testing_engine()
event.listen(engine, 'before_execute', tracker('execute'))
event.listen(engine, 'before_cursor_execute', tracker('cursor_execute'))
event.listen(engine, 'begin', tracker('begin'))
event.listen(engine, 'commit', tracker('commit'))
event.listen(engine, 'rollback', tracker('rollback'))
conn = engine.connect()
trans = conn.begin()
conn.execute(select([1]))
trans.rollback()
trans = conn.begin()
conn.execute(select([1]))
trans.commit()
eq_(canary, [
'begin', 'execute', 'cursor_execute', 'rollback',
'begin', 'execute', 'cursor_execute', 'commit',
])
@testing.requires.savepoints
@testing.requires.two_phase_transactions
def test_transactional_advanced(self):
canary1 = []
def tracker1(name):
def go(*args, **kw):
canary1.append(name)
return go
canary2 = []
def tracker2(name):
def go(*args, **kw):
canary2.append(name)
return go
engine = engines.testing_engine()
for name in ['begin', 'savepoint',
'rollback_savepoint', 'release_savepoint',
'rollback', 'begin_twophase',
'prepare_twophase', 'commit_twophase']:
event.listen(engine, '%s' % name, tracker1(name))
conn = engine.connect()
for name in ['begin', 'savepoint',
'rollback_savepoint', 'release_savepoint',
'rollback', 'begin_twophase',
'prepare_twophase', 'commit_twophase']:
event.listen(conn, '%s' % name, tracker2(name))
trans = conn.begin()
trans2 = conn.begin_nested()
conn.execute(select([1]))
trans2.rollback()
trans2 = conn.begin_nested()
conn.execute(select([1]))
trans2.commit()
trans.rollback()
trans = conn.begin_twophase()
conn.execute(select([1]))
trans.prepare()
trans.commit()
eq_(canary1, ['begin', 'savepoint',
'rollback_savepoint', 'savepoint', 'release_savepoint',
'rollback', 'begin_twophase',
'prepare_twophase', 'commit_twophase']
)
eq_(canary2, ['begin', 'savepoint',
'rollback_savepoint', 'savepoint', 'release_savepoint',
'rollback', 'begin_twophase',
'prepare_twophase', 'commit_twophase']
)
class HandleErrorTest(fixtures.TestBase):
__requires__ = 'ad_hoc_engines',
__backend__ = True
def tearDown(self):
Engine.dispatch._clear()
Engine._has_events = False
def test_legacy_dbapi_error(self):
engine = engines.testing_engine()
canary = Mock()
event.listen(engine, "dbapi_error", canary)
with engine.connect() as conn:
try:
conn.execute("SELECT FOO FROM I_DONT_EXIST")
assert False
except tsa.exc.DBAPIError as e:
eq_(canary.mock_calls[0][1][5], e.orig)
eq_(canary.mock_calls[0][1][2], "SELECT FOO FROM I_DONT_EXIST")
def test_legacy_dbapi_error_no_ad_hoc_context(self):
engine = engines.testing_engine()
listener = Mock(return_value=None)
event.listen(engine, 'dbapi_error', listener)
nope = Exception("nope")
class MyType(TypeDecorator):
impl = Integer
def process_bind_param(self, value, dialect):
raise nope
with engine.connect() as conn:
assert_raises_message(
tsa.exc.StatementError,
r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
conn.execute,
select([1]).where(
column('foo') == literal('bar', MyType()))
)
# no legacy event
eq_(listener.mock_calls, [])
def test_legacy_dbapi_error_non_dbapi_error(self):
engine = engines.testing_engine()
listener = Mock(return_value=None)
event.listen(engine, 'dbapi_error', listener)
nope = TypeError("I'm not a DBAPI error")
with engine.connect() as c:
c.connection.cursor = Mock(
return_value=Mock(
execute=Mock(
side_effect=nope
))
)
assert_raises_message(
TypeError,
"I'm not a DBAPI error",
c.execute, "select "
)
# no legacy event
eq_(listener.mock_calls, [])
def test_handle_error(self):
engine = engines.testing_engine()
canary = Mock(return_value=None)
event.listen(engine, "handle_error", canary)
with engine.connect() as conn:
try:
conn.execute("SELECT FOO FROM I_DONT_EXIST")
assert False
except tsa.exc.DBAPIError as e:
ctx = canary.mock_calls[0][1][0]
eq_(ctx.original_exception, e.orig)
is_(ctx.sqlalchemy_exception, e)
eq_(ctx.statement, "SELECT FOO FROM I_DONT_EXIST")
def test_exception_event_reraise(self):
engine = engines.testing_engine()
class MyException(Exception):
pass
@event.listens_for(engine, 'handle_error', retval=True)
def err(context):
stmt = context.statement
exception = context.original_exception
if "ERROR ONE" in str(stmt):
return MyException("my exception")
elif "ERROR TWO" in str(stmt):
return exception
else:
return None
conn = engine.connect()
# case 1: custom exception
assert_raises_message(
MyException,
"my exception",
conn.execute, "SELECT 'ERROR ONE' FROM I_DONT_EXIST"
)
# case 2: return the DBAPI exception we're given;
# no wrapping should occur
assert_raises(
conn.dialect.dbapi.Error,
conn.execute, "SELECT 'ERROR TWO' FROM I_DONT_EXIST"
)
# case 3: normal wrapping
assert_raises(
tsa.exc.DBAPIError,
conn.execute, "SELECT 'ERROR THREE' FROM I_DONT_EXIST"
)
def test_exception_event_reraise_chaining(self):
engine = engines.testing_engine()
class MyException1(Exception):
pass
class MyException2(Exception):
pass
class MyException3(Exception):
pass
@event.listens_for(engine, 'handle_error', retval=True)
def err1(context):
stmt = context.statement
if "ERROR ONE" in str(stmt) or "ERROR TWO" in str(stmt) \
or "ERROR THREE" in str(stmt):
return MyException1("my exception")
elif "ERROR FOUR" in str(stmt):
raise MyException3("my exception short circuit")
@event.listens_for(engine, 'handle_error', retval=True)
def err2(context):
stmt = context.statement
if ("ERROR ONE" in str(stmt) or "ERROR FOUR" in str(stmt)) \
and isinstance(context.chained_exception, MyException1):
raise MyException2("my exception chained")
elif "ERROR TWO" in str(stmt):
return context.chained_exception
else:
return None
conn = engine.connect()
with patch.object(engine.dialect.execution_ctx_cls,
        "handle_dbapi_exception") as patched:
assert_raises_message(
MyException2,
"my exception chained",
conn.execute, "SELECT 'ERROR ONE' FROM I_DONT_EXIST"
)
eq_(patched.call_count, 1)
with patch.object(engine.dialect.execution_ctx_cls,
        "handle_dbapi_exception") as patched:
assert_raises(
MyException1,
conn.execute, "SELECT 'ERROR TWO' FROM I_DONT_EXIST"
)
eq_(patched.call_count, 1)
with patch.object(engine.dialect.execution_ctx_cls,
        "handle_dbapi_exception") as patched:
    # test that a non-None return from err1 isn't cancelled out
    # by err2 returning None
assert_raises(
MyException1,
conn.execute, "SELECT 'ERROR THREE' FROM I_DONT_EXIST"
)
eq_(patched.call_count, 1)
with patch.object(engine.dialect.execution_ctx_cls,
        "handle_dbapi_exception") as patched:
assert_raises(
tsa.exc.DBAPIError,
conn.execute, "SELECT 'ERROR FIVE' FROM I_DONT_EXIST"
)
eq_(patched.call_count, 1)
with patch.object(engine.dialect.execution_ctx_cls,
        "handle_dbapi_exception") as patched:
assert_raises_message(
MyException3,
"my exception short circuit",
conn.execute, "SELECT 'ERROR FOUR' FROM I_DONT_EXIST"
)
eq_(patched.call_count, 1)
def test_exception_event_ad_hoc_context(self):
"""test that handle_error is called with a context in
cases where _handle_dbapi_error() is normally called without
any context.
"""
engine = engines.testing_engine()
listener = Mock(return_value=None)
event.listen(engine, 'handle_error', listener)
nope = Exception("nope")
class MyType(TypeDecorator):
impl = Integer
def process_bind_param(self, value, dialect):
raise nope
with engine.connect() as conn:
assert_raises_message(
tsa.exc.StatementError,
r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
conn.execute,
select([1]).where(
column('foo') == literal('bar', MyType()))
)
ctx = listener.mock_calls[0][1][0]
assert ctx.statement.startswith("SELECT 1 ")
is_(ctx.is_disconnect, False)
is_(ctx.original_exception, nope)
def test_exception_event_non_dbapi_error(self):
"""test that dbapi_error is called with a context in
cases where DBAPI raises an exception that is not a DBAPI
exception, e.g. internal errors or encoding problems.
"""
engine = engines.testing_engine()
listener = Mock(return_value=None)
event.listen(engine, 'handle_error', listener)
nope = TypeError("I'm not a DBAPI error")
with engine.connect() as c:
c.connection.cursor = Mock(
return_value=Mock(
execute=Mock(
side_effect=nope
))
)
assert_raises_message(
TypeError,
"I'm not a DBAPI error",
c.execute, "select "
)
ctx = listener.mock_calls[0][1][0]
eq_(ctx.statement, "select ")
is_(ctx.is_disconnect, False)
is_(ctx.original_exception, nope)
def _test_alter_disconnect(self, orig_error, evt_value):
engine = engines.testing_engine()
@event.listens_for(engine, "handle_error")
def evt(ctx):
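# the hook may reassign ctx.is_disconnect, overriding whatever the
# dialect's is_disconnect() reported for this error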
ctx.is_disconnect = evt_value
with patch.object(engine.dialect, "is_disconnect",
Mock(return_value=orig_error)):
with engine.connect() as c:
try:
c.execute("SELECT x FROM nonexistent")
assert False
except tsa.exc.StatementError as st:
eq_(st.connection_invalidated, evt_value)
def test_alter_disconnect_to_true(self):
self._test_alter_disconnect(False, True)
self._test_alter_disconnect(True, True)
def test_alter_disconnect_to_false(self):
self._test_alter_disconnect(True, False)
self._test_alter_disconnect(False, False)
class ProxyConnectionTest(fixtures.TestBase):
"""These are the same tests as EngineEventsTest, except using
the deprecated ConnectionProxy interface.
"""
__requires__ = 'ad_hoc_engines',
__prefer_requires__ = 'two_phase_transactions',
@testing.uses_deprecated(r'.*Use event.listen')
@testing.fails_on('firebird', 'Data type unknown')
def test_proxy(self):
stmts = []
cursor_stmts = []
class MyProxy(ConnectionProxy):
def execute(
self,
conn,
execute,
clauseelement,
*multiparams,
**params
):
stmts.append((str(clauseelement), params, multiparams))
return execute(clauseelement, *multiparams, **params)
def cursor_execute(
self,
execute,
cursor,
statement,
parameters,
context,
executemany,
):
cursor_stmts.append((str(statement), parameters, None))
return execute(cursor, statement, parameters, context)
def assert_stmts(expected, received):
for stmt, params, posn in expected:
if not received:
assert False, "Nothing available for stmt: %s" % stmt
while received:
teststmt, testparams, testmultiparams = received.pop(0)
teststmt = re.compile(r'[\n\t ]+', re.M).sub(' ', teststmt).strip()
if teststmt.startswith(stmt) and \
        (testparams == params or testparams == posn):
    break
for engine in (
        engines.testing_engine(options=dict(
            implicit_returning=False, proxy=MyProxy())),
        engines.testing_engine(options=dict(
            implicit_returning=False, proxy=MyProxy(),
            strategy='threadlocal'))):
m = MetaData(engine)
t1 = Table('t1', m,
Column('c1', Integer, primary_key=True),
Column('c2', String(50), default=func.lower('Foo'),
primary_key=True)
)
m.create_all()
try:
t1.insert().execute(c1=5, c2='some data')
t1.insert().execute(c1=6)
eq_(engine.execute('select * from t1').fetchall(),
    [(5, 'some data'), (6, 'foo')])
finally:
m.drop_all()
engine.dispose()
compiled = [
    ('CREATE TABLE t1', {}, None),
    ('INSERT INTO t1 (c1, c2)', {'c2': 'some data', 'c1': 5}, None),
    ('INSERT INTO t1 (c1, c2)', {'c1': 6}, None),
    ('select * from t1', {}, None),
    ('DROP TABLE t1', {}, None)]
# or engine.dialect.preexecute_pk_sequences:
if not testing.against('oracle+zxjdbc'):
cursor = [
    ('CREATE TABLE t1', {}, ()),
    ('INSERT INTO t1 (c1, c2)',
        {'c2': 'some data', 'c1': 5}, (5, 'some data')),
    ('SELECT lower', {'lower_2': 'Foo'}, ('Foo', )),
    ('INSERT INTO t1 (c1, c2)',
        {'c2': 'foo', 'c1': 6}, (6, 'foo')),
    ('select * from t1', {}, ()),
    ('DROP TABLE t1', {}, ()),
]
else:
insert2_params = 6, 'Foo'
if testing.against('oracle+zxjdbc'):
insert2_params += (ReturningParam(12), )
cursor = [
    ('CREATE TABLE t1', {}, ()),
    ('INSERT INTO t1 (c1, c2)',
        {'c2': 'some data', 'c1': 5}, (5, 'some data')),
    ('INSERT INTO t1 (c1, c2)',
        {'c1': 6, 'lower_2': 'Foo'}, insert2_params),
    ('select * from t1', {}, ()),
    ('DROP TABLE t1', {}, ())]
# bind param name 'lower_2' might be incorrect
assert_stmts(compiled, stmts)
assert_stmts(cursor, cursor_stmts)
@testing.uses_deprecated(r'.*Use event.listen')
def test_options(self):
canary = []
class TrackProxy(ConnectionProxy):
def __getattribute__(self, key):
fn = object.__getattribute__(self, key)
def go(*arg, **kw):
canary.append(fn.__name__)
return fn(*arg, **kw)
return go
engine = engines.testing_engine(options={'proxy':TrackProxy()})
conn = engine.connect()
c2 = conn.execution_options(foo='bar')
eq_(c2._execution_options, {'foo':'bar'})
c2.execute(select([1]))
c3 = c2.execution_options(bar='bat')
eq_(c3._execution_options, {'foo':'bar', 'bar':'bat'})
eq_(canary, ['execute', 'cursor_execute'])
@testing.uses_deprecated(r'.*Use event.listen')
def test_transactional(self):
canary = []
class TrackProxy(ConnectionProxy):
def __getattribute__(self, key):
fn = object.__getattribute__(self, key)
def go(*arg, **kw):
canary.append(fn.__name__)
return fn(*arg, **kw)
return go
engine = engines.testing_engine(options={'proxy':TrackProxy()})
conn = engine.connect()
trans = conn.begin()
conn.execute(select([1]))
trans.rollback()
trans = conn.begin()
conn.execute(select([1]))
trans.commit()
eq_(canary, [
'begin', 'execute', 'cursor_execute', 'rollback',
'begin', 'execute', 'cursor_execute', 'commit',
])
@testing.uses_deprecated(r'.*Use event.listen')
@testing.requires.savepoints
@testing.requires.two_phase_transactions
def test_transactional_advanced(self):
canary = []
class TrackProxy(ConnectionProxy):
def __getattribute__(self, key):
fn = object.__getattribute__(self, key)
def go(*arg, **kw):
canary.append(fn.__name__)
return fn(*arg, **kw)
return go
engine = engines.testing_engine(options={'proxy':TrackProxy()})
conn = engine.connect()
trans = conn.begin()
trans2 = conn.begin_nested()
conn.execute(select([1]))
trans2.rollback()
trans2 = conn.begin_nested()
conn.execute(select([1]))
trans2.commit()
trans.rollback()
trans = conn.begin_twophase()
conn.execute(select([1]))
trans.prepare()
trans.commit()
canary = [t for t in canary if t not in ('cursor_execute', 'execute')]
eq_(canary, ['begin', 'savepoint',
'rollback_savepoint', 'savepoint', 'release_savepoint',
'rollback', 'begin_twophase',
'prepare_twophase', 'commit_twophase']
)
class DialectEventTest(fixtures.TestBase):
@contextmanager
def _run_test(self, retval):
m1 = Mock()
m1.do_execute.return_value = retval
m1.do_executemany.return_value = retval
m1.do_execute_no_params.return_value = retval
e = engines.testing_engine(options={"_initialize": False})
event.listen(e, "do_execute", m1.do_execute)
event.listen(e, "do_executemany", m1.do_executemany)
event.listen(e, "do_execute_no_params", m1.do_execute_no_params)
e.dialect.do_execute = m1.real_do_execute
e.dialect.do_executemany = m1.real_do_executemany
e.dialect.do_execute_no_params = m1.real_do_execute_no_params
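# a truthy return from a do_execute* hook means the hook performed the
# execution itself, so the dialect's "real" method must not be called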
def mock_the_cursor(cursor, *arg):
arg[-1].get_result_proxy = Mock(return_value=Mock(context=arg[-1]))
return retval
m1.real_do_execute.side_effect = m1.do_execute.side_effect = mock_the_cursor
m1.real_do_executemany.side_effect = m1.do_executemany.side_effect = mock_the_cursor
m1.real_do_execute_no_params.side_effect = m1.do_execute_no_params.side_effect = mock_the_cursor
with e.connect() as conn:
yield conn, m1
def _assert(self, retval, m1, m2, mock_calls):
eq_(m1.mock_calls, mock_calls)
if retval:
eq_(m2.mock_calls, [])
else:
eq_(m2.mock_calls, mock_calls)
def _test_do_execute(self, retval):
with self._run_test(retval) as (conn, m1):
result = conn.execute("insert into table foo", {"foo": "bar"})
self._assert(
retval,
m1.do_execute, m1.real_do_execute,
[call(
result.context.cursor,
"insert into table foo",
{"foo": "bar"}, result.context)]
)
def _test_do_executemany(self, retval):
with self._run_test(retval) as (conn, m1):
result = conn.execute("insert into table foo",
[{"foo": "bar"}, {"foo": "bar"}])
self._assert(
retval,
m1.do_executemany, m1.real_do_executemany,
[call(
result.context.cursor,
"insert into table foo",
[{"foo": "bar"}, {"foo": "bar"}], result.context)]
)
def _test_do_execute_no_params(self, retval):
with self._run_test(retval) as (conn, m1):
result = conn.execution_options(no_parameters=True).execute(
    "insert into table foo")
self._assert(
retval,
m1.do_execute_no_params, m1.real_do_execute_no_params,
[call(
result.context.cursor,
"insert into table foo", result.context)]
)
def _test_cursor_execute(self, retval):
with self._run_test(retval) as (conn, m1):
dialect = conn.dialect
stmt = "insert into table foo"
params = {"foo": "bar"}
ctx = dialect.execution_ctx_cls._init_statement(
dialect, conn, conn.connection, stmt, [params])
conn._cursor_execute(ctx.cursor, stmt, params, ctx)
self._assert(
retval,
m1.do_execute, m1.real_do_execute,
[call(
ctx.cursor,
"insert into table foo",
{"foo": "bar"}, ctx)]
)
def test_do_execute_w_replace(self):
self._test_do_execute(True)
def test_do_execute_wo_replace(self):
self._test_do_execute(False)
def test_do_executemany_w_replace(self):
self._test_do_executemany(True)
def test_do_executemany_wo_replace(self):
self._test_do_executemany(False)
def test_do_execute_no_params_w_replace(self):
self._test_do_execute_no_params(True)
def test_do_execute_no_params_wo_replace(self):
self._test_do_execute_no_params(False)
def test_cursor_execute_w_replace(self):
self._test_cursor_execute(True)
def test_cursor_execute_wo_replace(self):
self._test_cursor_execute(False)
"""
``editquality generate_make -h``
::
Code-generate Makefile from template and configuration
:Usage:
generate_make -h | --help
generate_make
[--config=<path>]
[--main=<filename>]
[--output=<path>]
[--templates=<path>]
[--debug]
:Options:
--config=<path> Directory to search for configuration files
[default: config/]
--main=<filename> Override to use a main template other than the
default [default: Makefile.j2]
--output=<path> Where to write the Makefile output.
[default: <stdout>]
--templates=<path> Directory to search for input templates.
[default: templates/]
--debug Print debug logging
"""
import logging
import os.path
import sys
import docopt
from .. import config
from ..codegen import generate
logger = logging.getLogger(__name__)
def main(argv=None):
args = docopt.docopt(__doc__, argv=argv)
logging.basicConfig(
level=logging.DEBUG if args['--debug'] else logging.WARNING,
format='%(asctime)s %(levelname)s:%(name)s -- %(message)s'
)
config_path = args["--config"]
output_f = (sys.stdout
            if args["--output"] == "<stdout>"
            else open(args["--output"], "w"))
templates_path = args["--templates"]
main_template_path = args["--main"]
if not os.path.isabs(main_template_path):
# Join a filename to the default templates dir.
main_template_path = os.path.join(templates_path, main_template_path)
with open(main_template_path, "r") as f:
main_template = f.read()
variables = config.load_config(config_path)
output = generate.generate(variables, templates_path, main_template)
output_f.write(output)
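# A conventional entry-point guard; this is an addition here, since
# upstream the command may instead be wired up via a console_scripts
# entry point.
if __name__ == "__main__":
    main()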
from msrest.serialization import Model
class Product(Model):
_required = []
_attribute_map = {
'integer': {'key': 'integer', 'type': 'int'},
'string': {'key': 'string', 'type': 'str'},
}
def __init__(self, *args, **kwargs):
"""Product
:param int integer
:param str string
"""
self.integer = None
self.string = None
super(Product, self).__init__(*args, **kwargs)
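# Hedged usage sketch (not part of the original module): Serializer and
# Deserializer are msrest's standard helpers; everything beyond the
# Product class itself is illustrative only.
if __name__ == "__main__":
    from msrest.serialization import Serializer, Deserializer
    product = Product()
    product.integer = 5
    product.string = "five"
    body = Serializer().body(product, 'Product')
    print(body)  # expected: {'integer': 5, 'string': 'five'}
    restored = Deserializer({'Product': Product})('Product', body)
    print(restored.integer, restored.string)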
"""
Parser for todo format string.
from todo.parser import parser
parser.parse(string) # return an Todo instance
"""
from models import Task
from models import Todo
from ply import lex
from ply import yacc
class TodoLexer(object):
"""
Lexer for Todo format string.
Tokens
ID e.g. '1.'
DONE e.g. '(x)'
TASK e.g. 'This is a task'
"""
tokens = (
"ID",
"DONE",
"TASK",
)
t_ignore = "\x20\x09" # ignore spaces and tabs
def t_ID(self, t):
r'\d+\.([uU]|[lL]|[uU][lL]|[lL][uU])?'
t.value = int(t.value[:-1])
return t
def t_DONE(self, t):
r'(\(x\))'
return t
def t_TASK(self, t):
r'((?!\(x\))).+'
return t
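# note: ply matches function-defined tokens in definition order, so
# t_ID and t_DONE are tried before the catch-all t_TASK rule above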
def t_newline(self, t):
r'\n+'
t.lexer.lineno += len(t.value)
def t_error(self, t):
raise SyntaxError(
"Illegal character: '%s' at Line %d" % (t.value[0], t.lineno)
)
def __init__(self):
self.lexer = lex.lex(module=self)
class TodoParser(object):
"""
Parser for Todo format string, works with a todo lexer.
Parse string to Python list
todo_str = "1. (x) Write email to tom"
TodoParser().parse(todo_str)
"""
tokens = TodoLexer.tokens
def p_error(self, p):
if p:
raise SyntaxError(
"Character '%s' at line %d" % (p.value[0], p.lineno)
)
else:
raise SyntaxError("SyntaxError at EOF")
def p_start(self, p):
"start : translation_unit"
p[0] = self.todo
def p_translation_unit(self, p):
"""
translation_unit : translate_task
| translation_unit translate_task
|
"""
pass
def p_translate_task(self, p):
"""
translate_task : ID DONE TASK
| ID TASK
"""
if len(p) == 4:
done = True
content = p[3]
elif len(p) == 3:
done = False
content = p[2]
task = Task(p[1], content, done)
self.todo.append(task)
def __init__(self):
self.parser = yacc.yacc(module=self, debug=0, write_tables=0)
def parse(self, data):
# reset list
self.todo = Todo()
return self.parser.parse(data)
lexer = TodoLexer() # build lexer
parser = TodoParser() # build parser
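# Hedged demo (an addition): assumes Todo is iterable over Task objects
# exposing `id`, `content` and `done` attributes (see models).
if __name__ == "__main__":
    sample = "1. (x) Write email to tom\n2. Buy a cup of milk"
    for task in parser.parse(sample):
        print(task.id, task.done, task.content)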