repo_name | ref | path | copies | content
---|---|---|---|---
zubie7a/Algorithms | refs/heads/master | CodeSignal/Arcade/Intro/Level_06/03_Variable_Name.py | 3 | # https://codefights.com/arcade/intro/level-6/6Wv4WsrsMJ8Y2Fwno
import re
def variableName(name):
# Return whether it's a valid variable name. It must start with
# an alphabetic character or an underscore, and can then be
# followed by any alphanumeric characters and underscores.
return bool(re.match("^[a-zA-Z_][a-zA-Z_0-9]*$", name))
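# A quick sanity check (illustrative calls, not part of the original solution):
#   variableName("var_1")  -> True   (letter first, then letters/digits/underscores)
#   variableName("_temp")  -> True   (a leading underscore is allowed)
#   variableName("2var")   -> False  (must not start with a digit)
#   variableName("var-1")  -> False  (hyphens are not allowed)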
|
alviano/wasp | refs/heads/master | tests/wasp1/AllAnswerSets/rule_with_60_variables.test.py | 3 | input = """
bar(a).
foo(X1,X2,X3,X4,X5,X6,X7,X8,X9,X10,X11,X12,X13,X14,X15,X16,X17,X18,X19,X20,X21,X22,X23,X24,X25,X26,X27,X28,X29,X30,X31,X32,X33,X34,X35,X36,X37,X38,X39,X40,X41,X42,X43,X44,X45,X46,X47,X48,X49,X50,X51,X52,X53,X54,X55,X56,X57,X58,X59,X60) :- bar(X1),bar(X2),bar(X3),bar(X4),bar(X5),bar(X6),bar(X7),bar(X8),bar(X9),bar(X10),bar(X11),bar(X12),bar(X13),bar(X14),bar(X15),bar(X16),bar(X17),bar(X18),bar(X19),bar(X20),bar(X21),bar(X22),bar(X23),bar(X24),bar(X25),bar(X26),bar(X27),bar(X28),bar(X29),bar(X30),bar(X31),bar(X32),bar(X33),bar(X34),bar(X35),bar(X36),bar(X37),bar(X38),bar(X39),bar(X40),bar(X41),bar(X42),bar(X43),bar(X44),bar(X45),bar(X46),bar(X47),bar(X48),bar(X49),bar(X50),bar(X51),bar(X52),bar(X53),bar(X54),bar(X55),bar(X56),bar(X57),bar(X58),bar(X59),bar(X60).
"""
output = """
{bar(a), foo(a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a)}
"""
|
krieger-od/nwjs_chromium.src | refs/heads/master | tools/site_compare/command_line.py | 179 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Parse a command line, retrieving a command and its arguments.
Supports the concept of command line commands, each with its own set
of arguments. Supports dependent arguments and mutually exclusive arguments.
Basically, a better optparse. I took heed of epg's WHINE() in gvn.cmdline
and dumped optparse in favor of something better.
"""
import os.path
import re
import string
import sys
import textwrap
import types
def IsString(var):
"""Little helper function to see if a variable is a string."""
return type(var) in types.StringTypes
class ParseError(Exception):
"""Encapsulates errors from parsing, string arg is description."""
pass
class Command(object):
"""Implements a single command."""
def __init__(self, names, helptext, validator=None, impl=None):
"""Initializes Command from names and helptext, plus optional callables.
Args:
names: command name, or list of synonyms
helptext: brief string description of the command
validator: callable for custom argument validation
Should raise ParseError if it wants
impl: callable to be invoked when command is called
"""
self.names = names
self.validator = validator
self.helptext = helptext
self.impl = impl
self.args = []
self.required_groups = []
self.arg_dict = {}
self.positional_args = []
self.cmdline = None
class Argument(object):
"""Encapsulates an argument to a command."""
VALID_TYPES = ['string', 'readfile', 'int', 'flag', 'coords']
TYPES_WITH_VALUES = ['string', 'readfile', 'int', 'coords']
def __init__(self, names, helptext, type, metaname,
required, default, positional):
"""Command-line argument to a command.
Args:
names: argument name, or list of synonyms
helptext: brief description of the argument
type: type of the argument. Valid values include:
string - a string
readfile - a file which must exist and be available
for reading
int - an integer
flag - an optional flag (bool)
coords - (x,y) where x and y are ints
metaname: Name to display for value in help, inferred if not
specified
required: True if argument must be specified
default: Default value if not specified
positional: Argument specified by location, not name
Raises:
ValueError: the argument name is invalid for some reason
"""
if type not in Command.Argument.VALID_TYPES:
raise ValueError("Invalid type: %r" % type)
if required and default is not None:
raise ValueError("required and default are mutually exclusive")
if required and type == 'flag':
raise ValueError("A required flag? Give me a break.")
if metaname and type not in Command.Argument.TYPES_WITH_VALUES:
raise ValueError("Type %r can't have a metaname" % type)
# If no metaname is provided, infer it: strip leading punctuation from
# the last provided name and uppercase what remains
if not metaname and type in Command.Argument.TYPES_WITH_VALUES:
metaname = (
names[-1].lstrip(string.punctuation + string.whitespace).upper())
self.names = names
self.helptext = helptext
self.type = type
self.required = required
self.default = default
self.positional = positional
self.metaname = metaname
self.mutex = [] # arguments that are mutually exclusive with
# this one
self.depends = [] # arguments that must be present for this
# one to be valid
self.present = False # has this argument been specified?
def AddDependency(self, arg):
"""Makes this argument dependent on another argument.
Args:
arg: name of the argument this one depends on
"""
if arg not in self.depends:
self.depends.append(arg)
def AddMutualExclusion(self, arg):
"""Makes this argument invalid if another is specified.
Args:
arg: name of the mutually exclusive argument.
"""
if arg not in self.mutex:
self.mutex.append(arg)
def GetUsageString(self):
"""Returns a brief string describing the argument's usage."""
if not self.positional:
string = self.names[0]
if self.type in Command.Argument.TYPES_WITH_VALUES:
string += "="+self.metaname
else:
string = self.metaname
if not self.required:
string = "["+string+"]"
return string
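# For instance (following the rules above), an optional named int
# argument "--int" renders as "[--int=INT]", while a required positional
# argument with metaname "POS1" renders as just "POS1".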
def GetNames(self):
"""Returns a string containing a list of the arg's names."""
if self.positional:
return self.metaname
else:
return ", ".join(self.names)
def GetHelpString(self, width=80, indent=5, names_width=20, gutter=2):
"""Returns a help string including help for all the arguments."""
names = [" "*indent + line +" "*(names_width-len(line)) for line in
textwrap.wrap(self.GetNames(), names_width)]
helpstring = textwrap.wrap(self.helptext, width-indent-names_width-gutter)
if len(names) < len(helpstring):
names += [" "*(indent+names_width)]*(len(helpstring)-len(names))
if len(helpstring) < len(names):
helpstring += [""]*(len(names)-len(helpstring))
return "\n".join([name_line + " "*gutter + help_line for
name_line, help_line in zip(names, helpstring)])
def __repr__(self):
if self.present:
string = '= %r' % self.value
else:
string = "(absent)"
return "Argument %s '%s'%s" % (self.type, self.names[0], string)
# end of nested class Argument
def AddArgument(self, names, helptext, type="string", metaname=None,
required=False, default=None, positional=False):
"""Command-line argument to a command.
Args:
names: argument name, or list of synonyms
helptext: brief description of the argument
type: type of the argument
metaname: Name to display for value in help, inferred if not specified
required: True if argument must be specified
default: Default value if not specified
positional: Argument specified by location, not name
Raises:
ValueError: the argument already exists or is invalid
Returns:
The newly-created argument
"""
if IsString(names): names = [names]
names = [name.lower() for name in names]
for name in names:
if name in self.arg_dict:
raise ValueError("%s is already an argument"%name)
if (positional and required and
[arg for arg in self.args if arg.positional] and
not [arg for arg in self.args if arg.positional][-1].required):
raise ValueError(
"A required positional argument may not follow an optional one.")
arg = Command.Argument(names, helptext, type, metaname,
required, default, positional)
self.args.append(arg)
for name in names:
self.arg_dict[name] = arg
return arg
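# Illustrative use (hypothetical command and argument names; main() below
# exercises the real test setup):
#   cmd = cmdline.AddCommand("fetch", "fetch a resource")
#   cmd.AddArgument(["-u", "--url"], "resource location", required=True)
#   cmd.AddArgument("--retries", "retry count", type='int', default=3)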
def GetArgument(self, name):
"""Return an argument from a name."""
return self.arg_dict[name.lower()]
def AddMutualExclusion(self, args):
"""Specifies that a list of arguments are mutually exclusive."""
if len(args) < 2:
raise ValueError("At least two arguments must be specified.")
args = [arg.lower() for arg in args]
for index in xrange(len(args)-1):
for index2 in xrange(index+1, len(args)):
self.arg_dict[args[index]].AddMutualExclusion(self.arg_dict[args[index2]])
def AddDependency(self, dependent, depends_on):
"""Specifies that one argument may only be present if another is.
Args:
dependent: the name of the dependent argument
depends_on: the name of the argument on which it depends
"""
self.arg_dict[dependent.lower()].AddDependency(
self.arg_dict[depends_on.lower()])
def AddMutualDependency(self, args):
"""Specifies that a list of arguments are all mutually dependent."""
if len(args) < 2:
raise ValueError("At least two arguments must be specified.")
args = [arg.lower() for arg in args]
for (arg1, arg2) in [(arg1, arg2) for arg1 in args for arg2 in args]:
if arg1 == arg2: continue
self.arg_dict[arg1].AddDependency(self.arg_dict[arg2])
def AddRequiredGroup(self, args):
"""Specifies that at least one of the named arguments must be present."""
if len(args) < 2:
raise ValueError("At least two arguments must be in a required group.")
args = [self.arg_dict[arg.lower()] for arg in args]
self.required_groups.append(args)
def ParseArguments(self):
"""Given a command line, parse and validate the arguments."""
# reset all the arguments before we parse
for arg in self.args:
arg.present = False
arg.value = None
self.parse_errors = []
# look for arguments remaining on the command line
while len(self.cmdline.rargs):
try:
self.ParseNextArgument()
except ParseError, e:
self.parse_errors.append(e.args[0])
# after all the arguments are parsed, check for problems
for arg in self.args:
if not arg.present and arg.required:
self.parse_errors.append("'%s': required parameter was missing"
% arg.names[0])
if not arg.present and arg.default:
arg.present = True
arg.value = arg.default
if arg.present:
for mutex in arg.mutex:
if mutex.present:
self.parse_errors.append(
"'%s', '%s': arguments are mutually exclusive" %
(arg.argstr, mutex.argstr))
for depend in arg.depends:
if not depend.present:
self.parse_errors.append("'%s': '%s' must be specified as well" %
(arg.argstr, depend.names[0]))
# check for required groups
for group in self.required_groups:
if not [arg for arg in group if arg.present]:
self.parse_errors.append("%s: at least one must be present" %
(", ".join(["'%s'" % arg.names[-1] for arg in group])))
# if we have any validators, invoke them
if not self.parse_errors and self.validator:
try:
self.validator(self)
except ParseError, e:
self.parse_errors.append(e.args[0])
# Helper methods so you can treat the command like a dict
def __getitem__(self, key):
arg = self.arg_dict[key.lower()]
if arg.type == 'flag':
return arg.present
else:
return arg.value
def __iter__(self):
return [arg for arg in self.args if arg.present].__iter__()
def ArgumentPresent(self, key):
"""Tests if an argument exists and has been specified."""
return key.lower() in self.arg_dict and self.arg_dict[key.lower()].present
def __contains__(self, key):
return self.ArgumentPresent(key)
def ParseNextArgument(self):
"""Find the next argument in the command line and parse it."""
arg = None
value = None
argstr = self.cmdline.rargs.pop(0)
# First check: is this a literal argument?
if argstr.lower() in self.arg_dict:
arg = self.arg_dict[argstr.lower()]
if arg.type in Command.Argument.TYPES_WITH_VALUES:
if len(self.cmdline.rargs):
value = self.cmdline.rargs.pop(0)
# Second check: is this of the form "arg=val" or "arg:val"?
if arg is None:
delimiter_pos = -1
for delimiter in [':', '=']:
pos = argstr.find(delimiter)
if pos >= 0:
if delimiter_pos < 0 or pos < delimiter_pos:
delimiter_pos = pos
if delimiter_pos >= 0:
testarg = argstr[:delimiter_pos]
testval = argstr[delimiter_pos+1:]
if testarg.lower() in self.arg_dict:
arg = self.arg_dict[testarg.lower()]
argstr = testarg
value = testval
# Third check: does this begin an argument?
if arg is None:
for key in self.arg_dict.iterkeys():
if (len(key) < len(argstr) and
self.arg_dict[key].type in Command.Argument.TYPES_WITH_VALUES and
argstr[:len(key)].lower() == key):
value = argstr[len(key):]
argstr = argstr[:len(key)]
arg = self.arg_dict[argstr]
# Fourth check: do we have any positional arguments available?
if arg is None:
for positional_arg in [
testarg for testarg in self.args if testarg.positional]:
if not positional_arg.present:
arg = positional_arg
value = argstr
argstr = positional_arg.names[0]
break
# Push the retrieved argument/value onto the largs stack
if argstr: self.cmdline.largs.append(argstr)
if value: self.cmdline.largs.append(value)
# If we've made it this far and haven't found an arg, give up
if arg is None:
raise ParseError("Unknown argument: '%s'" % argstr)
# Convert the value, if necessary
if arg.type in Command.Argument.TYPES_WITH_VALUES and value is None:
raise ParseError("Argument '%s' requires a value" % argstr)
if value is not None:
value = self.StringToValue(value, arg.type, argstr)
arg.argstr = argstr
arg.value = value
arg.present = True
# end method ParseNextArgument
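# Summarizing the four checks above, all of the following bind --int to 456
# (these forms also appear in the test lines in main() below):
#   "--int 456"    literal argument name followed by its value
#   "--int=456"    name=value ("--int:456" works as well)
#   "--int456"     name immediately followed by the value
# and a bare token such as "param1" falls through to the first unfilled
# positional argument.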
def StringToValue(self, value, type, argstr):
"""Convert a string from the command line to a value type."""
try:
if type == 'string':
pass # leave it be
elif type == 'int':
try:
value = int(value)
except ValueError:
raise ParseError
elif type == 'readfile':
if not os.path.isfile(value):
raise ParseError("'%s': '%s' does not exist" % (argstr, value))
elif type == 'coords':
try:
value = [int(val) for val in
re.match("\(\s*(\d+)\s*\,\s*(\d+)\s*\)\s*\Z", value).
groups()]
except AttributeError:
raise ParseError
else:
raise ValueError("Unknown type: '%s'" % type)
except ParseError, e:
# The bare exception is raised in the generic case; more specific errors
# will arrive with arguments and should just be reraised
if not e.args:
e = ParseError("'%s': unable to convert '%s' to type '%s'" %
(argstr, value, type))
raise e
return value
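# For example, a 'coords' argument accepts strings like "(123,456)" and
# returns [123, 456]; any string that fails the (x,y) pattern makes
# re.match return None, and the resulting AttributeError is converted
# into a ParseError.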
def SortArgs(self):
"""Returns a method that can be passed to sort() to sort arguments."""
def ArgSorter(arg1, arg2):
"""Helper for sorting arguments in the usage string.
Positional arguments come first, then required arguments,
then optional arguments. Pylint demands this trivial function
have both Args: and Returns: sections, sigh.
Args:
arg1: the first argument to compare
arg2: the second argument to compare
Returns:
-1 if arg1 should be sorted first, +1 if it should be sorted second,
and 0 if arg1 and arg2 have the same sort level.
"""
return ((arg2.positional-arg1.positional)*2 +
(arg2.required-arg1.required))
return ArgSorter
def GetUsageString(self, width=80, name=None):
"""Gets a string describing how the command is used."""
if name is None: name = self.names[0]
initial_indent = "Usage: %s %s " % (self.cmdline.prog, name)
subsequent_indent = " " * len(initial_indent)
sorted_args = self.args[:]
sorted_args.sort(self.SortArgs())
return textwrap.fill(
" ".join([arg.GetUsageString() for arg in sorted_args]), width,
initial_indent=initial_indent,
subsequent_indent=subsequent_indent)
def GetHelpString(self, width=80):
"""Returns a list of help strings for all this command's arguments."""
sorted_args = self.args[:]
sorted_args.sort(self.SortArgs())
return "\n".join([arg.GetHelpString(width) for arg in sorted_args])
# end class Command
class CommandLine(object):
"""Parse a command line, extracting a command and its arguments."""
def __init__(self):
self.commands = []
self.cmd_dict = {}
# Add the help command to the parser
help_cmd = self.AddCommand(["help", "--help", "-?", "-h"],
"Displays help text for a command",
ValidateHelpCommand,
DoHelpCommand)
help_cmd.AddArgument(
"command", "Command to retrieve help for", positional=True)
help_cmd.AddArgument(
"--width", "Width of the output", type='int', default=80)
self.Exit = sys.exit # override this if you don't want the script to halt
# on error or on display of help
self.out = sys.stdout # override these if you want to redirect
self.err = sys.stderr # output or error messages
def AddCommand(self, names, helptext, validator=None, impl=None):
"""Add a new command to the parser.
Args:
names: command name, or list of synonyms
helptext: brief string description of the command
validator: method to validate a command's arguments
impl: callable to be invoked when command is called
Raises:
ValueError: raised if command already added
Returns:
The new command
"""
if IsString(names): names = [names]
for name in names:
if name in self.cmd_dict:
raise ValueError("%s is already a command"%name)
cmd = Command(names, helptext, validator, impl)
cmd.cmdline = self
self.commands.append(cmd)
for name in names:
self.cmd_dict[name.lower()] = cmd
return cmd
def GetUsageString(self):
"""Returns simple usage instructions."""
return "Type '%s help' for usage." % self.prog
def ParseCommandLine(self, argv=None, prog=None, execute=True):
"""Does the work of parsing a command line.
Args:
argv: list of arguments, defaults to sys.argv[1:]
prog: name of the command, defaults to the base name of the script
execute: if false, just parse, don't invoke the 'impl' member
Returns:
The command that was executed
"""
if argv is None: argv = sys.argv[1:]
if prog is None: prog = os.path.basename(sys.argv[0]).split('.')[0]
# Store off our parameters, we may need them someday
self.argv = argv
self.prog = prog
# We shouldn't be invoked without arguments, that's just lame
if not len(argv):
self.out.writelines(self.GetUsageString())
self.Exit()
return None # in case the client overrides Exit
# Is it a valid command?
self.command_string = argv[0].lower()
if not self.command_string in self.cmd_dict:
self.err.write("Unknown command: '%s'\n\n" % self.command_string)
self.out.write(self.GetUsageString())
self.Exit()
return None # in case the client overrides Exit
self.command = self.cmd_dict[self.command_string]
# "rargs" = remaining (unparsed) arguments
# "largs" = already parsed, "left" of the read head
self.rargs = argv[1:]
self.largs = []
# let the command object do the parsing
self.command.ParseArguments()
if self.command.parse_errors:
# there were errors, output the usage string and exit
self.err.write(self.command.GetUsageString()+"\n\n")
self.err.write("\n".join(self.command.parse_errors))
self.err.write("\n\n")
self.Exit()
elif execute and self.command.impl:
self.command.impl(self.command)
return self.command
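# A typical driver looks like this (hypothetical script; main() below does
# the same for its test command):
#   cmdline = CommandLine()
#   ...AddCommand()/AddArgument() calls...
#   cmdline.ParseCommandLine()  # parses sys.argv[1:] and invokes the impl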
def __getitem__(self, key):
return self.cmd_dict[key]
def __iter__(self):
return self.cmd_dict.__iter__()
def ValidateHelpCommand(command):
"""Checks to make sure an argument to 'help' is a valid command."""
if 'command' in command and command['command'] not in command.cmdline:
raise ParseError("'%s': unknown command" % command['command'])
def DoHelpCommand(command):
"""Executed when the command is 'help'."""
out = command.cmdline.out
width = command['--width']
if 'command' not in command:
out.write(command.GetUsageString())
out.write("\n\n")
indent = 5
gutter = 2
command_width = (
max([len(cmd.names[0]) for cmd in command.cmdline.commands]) + gutter)
for cmd in command.cmdline.commands:
cmd_name = cmd.names[0]
initial_indent = (" "*indent + cmd_name + " "*
(command_width+gutter-len(cmd_name)))
subsequent_indent = " "*(indent+command_width+gutter)
out.write(textwrap.fill(cmd.helptext, width,
initial_indent=initial_indent,
subsequent_indent=subsequent_indent))
out.write("\n")
out.write("\n")
else:
help_cmd = command.cmdline[command['command']]
out.write(textwrap.fill(help_cmd.helptext, width))
out.write("\n\n")
out.write(help_cmd.GetUsageString(width=width))
out.write("\n\n")
out.write(help_cmd.GetHelpString(width=width))
out.write("\n")
command.cmdline.Exit()
def main():
# If we're invoked rather than imported, run some tests
cmdline = CommandLine()
# Since we're testing, override Exit()
def TestExit():
pass
cmdline.Exit = TestExit
# Actually, while we're at it, let's override error output too
cmdline.err = open(os.path.devnull, "w")
test = cmdline.AddCommand(["test", "testa", "testb"], "test command")
test.AddArgument(["-i", "--int", "--integer", "--optint", "--optionalint"],
"optional integer parameter", type='int')
test.AddArgument("--reqint", "required integer parameter", type='int',
required=True)
test.AddArgument("pos1", "required positional argument", positional=True,
required=True)
test.AddArgument("pos2", "optional positional argument", positional=True)
test.AddArgument("pos3", "another optional positional arg",
positional=True)
# mutually dependent arguments
test.AddArgument("--mutdep1", "mutually dependent parameter 1")
test.AddArgument("--mutdep2", "mutually dependent parameter 2")
test.AddArgument("--mutdep3", "mutually dependent parameter 3")
test.AddMutualDependency(["--mutdep1", "--mutdep2", "--mutdep3"])
# mutually exclusive arguments
test.AddArgument("--mutex1", "mutually exclusive parameter 1")
test.AddArgument("--mutex2", "mutually exclusive parameter 2")
test.AddArgument("--mutex3", "mutually exclusive parameter 3")
test.AddMutualExclusion(["--mutex1", "--mutex2", "--mutex3"])
# dependent argument
test.AddArgument("--dependent", "dependent argument")
test.AddDependency("--dependent", "--int")
# other argument types
test.AddArgument("--file", "filename argument", type='readfile')
test.AddArgument("--coords", "coordinate argument", type='coords')
test.AddArgument("--flag", "flag argument", type='flag')
test.AddArgument("--req1", "part of a required group", type='flag')
test.AddArgument("--req2", "part 2 of a required group", type='flag')
test.AddRequiredGroup(["--req1", "--req2"])
# a few failure cases
exception_cases = """
test.AddArgument("failpos", "can't have req'd pos arg after opt",
positional=True, required=True)
+++
test.AddArgument("--int", "this argument already exists")
+++
test.AddDependency("--int", "--doesntexist")
+++
test.AddMutualDependency(["--doesntexist", "--mutdep2"])
+++
test.AddMutualExclusion(["--doesntexist", "--mutex2"])
+++
test.AddArgument("--reqflag", "required flag", required=True, type='flag')
+++
test.AddRequiredGroup(["--req1", "--doesntexist"])
"""
for exception_case in exception_cases.split("+++"):
try:
exception_case = exception_case.strip()
exec exception_case # yes, I'm using exec, it's just for a test.
except ValueError:
# this is expected
pass
except KeyError:
# ...and so is this
pass
else:
print ("FAILURE: expected an exception for '%s'"
" and didn't get it" % exception_case)
# Let's do some parsing! first, the minimal success line:
MIN = "test --reqint 123 param1 --req1 "
# tuples of (command line, expected error count)
test_lines = [
("test --int 3 foo --req1", 1), # missing required named parameter
("test --reqint 3 --req1", 1), # missing required positional parameter
(MIN, 0), # success!
("test param1 --reqint 123 --req1", 0), # success, order shouldn't matter
("test param1 --reqint 123 --req2", 0), # success, any of required group ok
(MIN+"param2", 0), # another positional parameter is okay
(MIN+"param2 param3", 0), # and so are three
(MIN+"param2 param3 param4", 1), # but four are just too many
(MIN+"--int", 1), # where's the value?
(MIN+"--int 456", 0), # this is fine
(MIN+"--int456", 0), # as is this
(MIN+"--int:456", 0), # and this
(MIN+"--int=456", 0), # and this
(MIN+"--file c:\\windows\\system32\\kernel32.dll", 0), # yup
(MIN+"--file c:\\thisdoesntexist", 1), # nope
(MIN+"--mutdep1 a", 2), # no!
(MIN+"--mutdep2 b", 2), # also no!
(MIN+"--mutdep3 c", 2), # dream on!
(MIN+"--mutdep1 a --mutdep2 b", 2), # almost!
(MIN+"--mutdep1 a --mutdep2 b --mutdep3 c", 0), # yes
(MIN+"--mutex1 a", 0), # yes
(MIN+"--mutex2 b", 0), # yes
(MIN+"--mutex3 c", 0), # fine
(MIN+"--mutex1 a --mutex2 b", 1), # not fine
(MIN+"--mutex1 a --mutex2 b --mutex3 c", 3), # even worse
(MIN+"--dependent 1", 1), # no
(MIN+"--dependent 1 --int 2", 0), # ok
(MIN+"--int abc", 1), # bad type
(MIN+"--coords abc", 1), # also bad
(MIN+"--coords (abc)", 1), # getting warmer
(MIN+"--coords (abc,def)", 1), # missing something
(MIN+"--coords (123)", 1), # ooh, so close
(MIN+"--coords (123,def)", 1), # just a little farther
(MIN+"--coords (123,456)", 0), # finally!
("test --int 123 --reqint=456 foo bar --coords(42,88) baz --req1", 0)
]
badtests = 0
for (test, expected_failures) in test_lines:
cmdline.ParseCommandLine([x.strip() for x in test.strip().split(" ")])
if not len(cmdline.command.parse_errors) == expected_failures:
print "FAILED:\n issued: '%s'\n expected: %d\n received: %d\n\n" % (
test, expected_failures, len(cmdline.command.parse_errors))
badtests += 1
print "%d failed out of %d tests" % (badtests, len(test_lines))
cmdline.ParseCommandLine(["help", "test"])
if __name__ == "__main__":
sys.exit(main())
|
mbayon/TFG-MachineLearning | refs/heads/master | vbig/lib/python2.7/site-packages/scipy/signal/tests/test_max_len_seq.py | 20 | from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
from pytest import raises as assert_raises
from numpy.fft import fft, ifft
from scipy.signal import max_len_seq
class TestMLS(object):
def test_mls_inputs(self):
# can't all be zero state
assert_raises(ValueError, max_len_seq,
10, state=np.zeros(10))
# wrong size state
assert_raises(ValueError, max_len_seq, 10,
state=np.ones(3))
# wrong length
assert_raises(ValueError, max_len_seq, 10, length=-1)
assert_array_equal(max_len_seq(10, length=0)[0], [])
# unknown taps
assert_raises(ValueError, max_len_seq, 64)
# bad taps
assert_raises(ValueError, max_len_seq, 10, taps=[-1, 1])
def test_mls_output(self):
# define some alternate working taps
alt_taps = {2: [1], 3: [2], 4: [3], 5: [4, 3, 2], 6: [5, 4, 1], 7: [4],
8: [7, 5, 3]}
# assume the other bit levels work, too slow to test higher orders...
for nbits in range(2, 8):
for state in [None, np.round(np.random.rand(nbits))]:
for taps in [None, alt_taps[nbits]]:
if state is not None and np.all(state == 0):
state[0] = 1 # they can't all be zero
orig_m = max_len_seq(nbits, state=state,
taps=taps)[0]
m = 2. * orig_m - 1. # convert to +/- 1 representation
# First, make sure every term is +1 or -1
err_msg = "mls had non binary terms"
assert_array_equal(np.abs(m), np.ones_like(m),
err_msg=err_msg)
# Test via circular cross-correlation, which is just mult.
# in the frequency domain with one signal conjugated
tester = np.real(ifft(fft(m) * np.conj(fft(m))))
out_len = 2**nbits - 1
# impulse amplitude == test_len
err_msg = "mls impulse has incorrect value"
assert_allclose(tester[0], out_len, err_msg=err_msg)
# steady-state is -1
err_msg = "mls steady-state has incorrect value"
assert_allclose(tester[1:], -1 * np.ones(out_len - 1),
err_msg=err_msg)
# let's do the split thing using a couple options
for n in (1, 2**(nbits - 1)):
m1, s1 = max_len_seq(nbits, state=state, taps=taps,
length=n)
m2, s2 = max_len_seq(nbits, state=s1, taps=taps,
length=1)
m3, s3 = max_len_seq(nbits, state=s2, taps=taps,
length=out_len - n - 1)
new_m = np.concatenate((m1, m2, m3))
assert_array_equal(orig_m, new_m)
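# For reference, a minimal standalone call (values worked out by hand):
#   seq, state = max_len_seq(4)  # 2**4 - 1 = 15 samples of 0s and 1s
#   (2. * seq - 1.).sum()        # the +/- 1 form of an MLS always sums to 1.0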
|
genouest/biomaj-download | refs/heads/master | biomaj_download/message/downmessage_pb2.py | 1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: downmessage.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='downmessage.proto',
package='biomaj.download',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x11\x64ownmessage.proto\x12\x0f\x62iomaj.download\"\x9d\x02\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x0c\n\x04root\x18\x02 \x01(\t\x12\x0f\n\x07save_as\x18\x03 \x01(\t\x12\x0b\n\x03url\x18\x04 \x01(\t\x12\x30\n\x08metadata\x18\x05 \x01(\x0b\x32\x1e.biomaj.download.File.MetaData\x1a\xa8\x01\n\x08MetaData\x12\x13\n\x0bpermissions\x18\x01 \x01(\t\x12\r\n\x05group\x18\x02 \x01(\t\x12\x0c\n\x04size\x18\x03 \x01(\x03\x12\x0c\n\x04hash\x18\x04 \x01(\t\x12\x0c\n\x04year\x18\x05 \x01(\x05\x12\r\n\x05month\x18\x06 \x01(\x05\x12\x0b\n\x03\x64\x61y\x18\x07 \x01(\x05\x12\x0e\n\x06\x66ormat\x18\x08 \x01(\t\x12\x0b\n\x03md5\x18\t \x01(\t\x12\x15\n\rdownload_time\x18\n \x01(\x03\"0\n\x08\x46ileList\x12$\n\x05\x66iles\x18\x01 \x03(\x0b\x32\x15.biomaj.download.File\"\xaa\x02\n\tOperation\x12\x32\n\x04type\x18\x01 \x02(\x0e\x32$.biomaj.download.Operation.OPERATION\x12/\n\x08\x64ownload\x18\x02 \x01(\x0b\x32\x1d.biomaj.download.DownloadFile\x12)\n\x07process\x18\x03 \x01(\x0b\x32\x18.biomaj.download.Process\x12/\n\x05trace\x18\x04 \x01(\x0b\x32 .biomaj.download.Operation.Trace\x1a*\n\x05Trace\x12\x10\n\x08trace_id\x18\x01 \x02(\t\x12\x0f\n\x07span_id\x18\x02 \x02(\t\"0\n\tOPERATION\x12\x08\n\x04LIST\x10\x00\x12\x0c\n\x08\x44OWNLOAD\x10\x01\x12\x0b\n\x07PROCESS\x10\x02\"\x17\n\x07Process\x12\x0c\n\x04\x65xec\x18\x01 \x02(\t\"\x94\x0b\n\x0c\x44ownloadFile\x12\x0c\n\x04\x62\x61nk\x18\x01 \x02(\t\x12\x0f\n\x07session\x18\x02 \x02(\t\x12\x11\n\tlocal_dir\x18\x03 \x02(\t\x12\x18\n\x10timeout_download\x18\x04 \x01(\x05\x12=\n\x0bremote_file\x18\x05 \x02(\x0b\x32(.biomaj.download.DownloadFile.RemoteFile\x12\x32\n\x05proxy\x18\x06 \x01(\x0b\x32#.biomaj.download.DownloadFile.Proxy\x12\x43\n\x0bhttp_method\x18\x08 \x01(\x0e\x32).biomaj.download.DownloadFile.HTTP_METHOD:\x03GET\x12;\n\x07options\x18\t \x03(\x0b\x32*.biomaj.download.DownloadFile.OptionsEntry\x1a$\n\x05Param\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\r\n\x05value\x18\x02 \x02(\t\x1a\xcd\x03\n\tHttpParse\x12\x91\x01\n\x08\x64ir_line\x18\x01 \x02(\t:\x7f<img[\\s]+src=\"[\\S]+\"[\\s]+alt=\"\\[DIR\\]\"[\\s]*/?>[\\s]*<a[\\s]+href=\"([\\S]+)/\"[\\s]*>.*([\\d]{2}-[\\w\\d]{2,5}-[\\d]{4}\\s[\\d]{2}:[\\d]{2})\x12\xa5\x01\n\tfile_line\x18\x02 \x02(\t:\x91\x01<img[\\s]+src=\"[\\S]+\"[\\s]+alt=\"\\[[\\s]+\\]\"[\\s]*/?>[\\s]<a[\\s]+href=\"([\\S]+)\".*([\\d]{2}-[\\w\\d]{2,5}-[\\d]{4}\\s[\\d]{2}:[\\d]{2})[\\s]+([\\d\\.]+[MKG]{0,1})\x12\x13\n\x08\x64ir_name\x18\x03 \x02(\x05:\x01\x31\x12\x13\n\x08\x64ir_date\x18\x04 \x02(\x05:\x01\x32\x12\x14\n\tfile_name\x18\x05 \x02(\x05:\x01\x31\x12\x14\n\tfile_date\x18\x06 \x02(\x05:\x01\x32\x12\x18\n\x10\x66ile_date_format\x18\x07 \x01(\t\x12\x14\n\tfile_size\x18\x08 \x02(\x05:\x01\x33\x1a\xb8\x02\n\nRemoteFile\x12$\n\x05\x66iles\x18\x01 \x03(\x0b\x32\x15.biomaj.download.File\x12\x38\n\x08protocol\x18\x02 \x02(\x0e\x32&.biomaj.download.DownloadFile.Protocol\x12\x0e\n\x06server\x18\x03 \x02(\t\x12\x12\n\nremote_dir\x18\x04 \x02(\t\x12\x0f\n\x07save_as\x18\x05 \x01(\t\x12\x32\n\x05param\x18\x06 \x03(\x0b\x32#.biomaj.download.DownloadFile.Param\x12;\n\nhttp_parse\x18\x07 \x01(\x0b\x32\'.biomaj.download.DownloadFile.HttpParse\x12\x13\n\x0b\x63redentials\x18\x08 \x01(\t\x12\x0f\n\x07matches\x18\t \x03(\t\x1a*\n\x05Proxy\x12\r\n\x05proxy\x18\x01 \x02(\t\x12\x12\n\nproxy_auth\x18\x02 \x01(\t\x1a.\n\x0cOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x93\x01\n\x08Protocol\x12\x07\n\x03\x46TP\x10\x00\x12\x08\n\x04\x46TPS\x10\x01\x12\x08\n\x04HTTP\x10\x02\x12\t\n\x05HTTPS\x10\x03\x12\r\n\tDIRECTFTP\x10\x04\x12\x0e\n\nDIRECTHTTP\x10\x05\x12\x0f\n\x0b\x44IRECTHTTPS\x10\x06\x12\t\n\x05LOCAL\x10\x07\x12\t\n\x05RSYNC\x10\x08\x12\t\n\x05IRODS\x10\t\x12\x0e\n\nDIRECTFTPS\x10\n\" \n\x0bHTTP_METHOD\x12\x07\n\x03GET\x10\x00\x12\x08\n\x04POST\x10\x01')
)
_OPERATION_OPERATION = _descriptor.EnumDescriptor(
name='OPERATION',
full_name='biomaj.download.Operation.OPERATION',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='LIST', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOWNLOAD', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PROCESS', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=627,
serialized_end=675,
)
_sym_db.RegisterEnumDescriptor(_OPERATION_OPERATION)
_DOWNLOADFILE_PROTOCOL = _descriptor.EnumDescriptor(
name='Protocol',
full_name='biomaj.download.DownloadFile.Protocol',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='FTP', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FTPS', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HTTP', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HTTPS', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DIRECTFTP', index=4, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DIRECTHTTP', index=5, number=5,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DIRECTHTTPS', index=6, number=6,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LOCAL', index=7, number=7,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RSYNC', index=8, number=8,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='IRODS', index=9, number=9,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DIRECTFTPS', index=10, number=10,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1950,
serialized_end=2097,
)
_sym_db.RegisterEnumDescriptor(_DOWNLOADFILE_PROTOCOL)
_DOWNLOADFILE_HTTP_METHOD = _descriptor.EnumDescriptor(
name='HTTP_METHOD',
full_name='biomaj.download.DownloadFile.HTTP_METHOD',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='GET', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POST', index=1, number=1,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=2099,
serialized_end=2131,
)
_sym_db.RegisterEnumDescriptor(_DOWNLOADFILE_HTTP_METHOD)
_FILE_METADATA = _descriptor.Descriptor(
name='MetaData',
full_name='biomaj.download.File.MetaData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='permissions', full_name='biomaj.download.File.MetaData.permissions', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='group', full_name='biomaj.download.File.MetaData.group', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='size', full_name='biomaj.download.File.MetaData.size', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hash', full_name='biomaj.download.File.MetaData.hash', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='year', full_name='biomaj.download.File.MetaData.year', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='month', full_name='biomaj.download.File.MetaData.month', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='day', full_name='biomaj.download.File.MetaData.day', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='format', full_name='biomaj.download.File.MetaData.format', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='md5', full_name='biomaj.download.File.MetaData.md5', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='download_time', full_name='biomaj.download.File.MetaData.download_time', index=9,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=156,
serialized_end=324,
)
_FILE = _descriptor.Descriptor(
name='File',
full_name='biomaj.download.File',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='biomaj.download.File.name', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='root', full_name='biomaj.download.File.root', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='save_as', full_name='biomaj.download.File.save_as', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='url', full_name='biomaj.download.File.url', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='metadata', full_name='biomaj.download.File.metadata', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_FILE_METADATA, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=39,
serialized_end=324,
)
_FILELIST = _descriptor.Descriptor(
name='FileList',
full_name='biomaj.download.FileList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='files', full_name='biomaj.download.FileList.files', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=326,
serialized_end=374,
)
_OPERATION_TRACE = _descriptor.Descriptor(
name='Trace',
full_name='biomaj.download.Operation.Trace',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='trace_id', full_name='biomaj.download.Operation.Trace.trace_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='span_id', full_name='biomaj.download.Operation.Trace.span_id', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=583,
serialized_end=625,
)
_OPERATION = _descriptor.Descriptor(
name='Operation',
full_name='biomaj.download.Operation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='biomaj.download.Operation.type', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='download', full_name='biomaj.download.Operation.download', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='process', full_name='biomaj.download.Operation.process', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trace', full_name='biomaj.download.Operation.trace', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_OPERATION_TRACE, ],
enum_types=[
_OPERATION_OPERATION,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=377,
serialized_end=675,
)
_PROCESS = _descriptor.Descriptor(
name='Process',
full_name='biomaj.download.Process',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='exec', full_name='biomaj.download.Process.exec', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=677,
serialized_end=700,
)
_DOWNLOADFILE_PARAM = _descriptor.Descriptor(
name='Param',
full_name='biomaj.download.DownloadFile.Param',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='biomaj.download.DownloadFile.Param.name', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='biomaj.download.DownloadFile.Param.value', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1040,
serialized_end=1076,
)
_DOWNLOADFILE_HTTPPARSE = _descriptor.Descriptor(
name='HttpParse',
full_name='biomaj.download.DownloadFile.HttpParse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dir_line', full_name='biomaj.download.DownloadFile.HttpParse.dir_line', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=True, default_value=_b("<img[\\s]+src=\"[\\S]+\"[\\s]+alt=\"\\[DIR\\]\"[\\s]*/?>[\\s]*<a[\\s]+href=\"([\\S]+)/\"[\\s]*>.*([\\d]{2}-[\\w\\d]{2,5}-[\\d]{4}\\s[\\d]{2}:[\\d]{2})").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='file_line', full_name='biomaj.download.DownloadFile.HttpParse.file_line', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=True, default_value=_b("<img[\\s]+src=\"[\\S]+\"[\\s]+alt=\"\\[[\\s]+\\]\"[\\s]*/?>[\\s]<a[\\s]+href=\"([\\S]+)\".*([\\d]{2}-[\\w\\d]{2,5}-[\\d]{4}\\s[\\d]{2}:[\\d]{2})[\\s]+([\\d\\.]+[MKG]{0,1})").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dir_name', full_name='biomaj.download.DownloadFile.HttpParse.dir_name', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dir_date', full_name='biomaj.download.DownloadFile.HttpParse.dir_date', index=3,
number=4, type=5, cpp_type=1, label=2,
has_default_value=True, default_value=2,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='file_name', full_name='biomaj.download.DownloadFile.HttpParse.file_name', index=4,
number=5, type=5, cpp_type=1, label=2,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='file_date', full_name='biomaj.download.DownloadFile.HttpParse.file_date', index=5,
number=6, type=5, cpp_type=1, label=2,
has_default_value=True, default_value=2,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='file_date_format', full_name='biomaj.download.DownloadFile.HttpParse.file_date_format', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='file_size', full_name='biomaj.download.DownloadFile.HttpParse.file_size', index=7,
number=8, type=5, cpp_type=1, label=2,
has_default_value=True, default_value=3,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1079,
serialized_end=1540,
)
_DOWNLOADFILE_REMOTEFILE = _descriptor.Descriptor(
name='RemoteFile',
full_name='biomaj.download.DownloadFile.RemoteFile',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='files', full_name='biomaj.download.DownloadFile.RemoteFile.files', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='protocol', full_name='biomaj.download.DownloadFile.RemoteFile.protocol', index=1,
number=2, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='server', full_name='biomaj.download.DownloadFile.RemoteFile.server', index=2,
number=3, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='remote_dir', full_name='biomaj.download.DownloadFile.RemoteFile.remote_dir', index=3,
number=4, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='save_as', full_name='biomaj.download.DownloadFile.RemoteFile.save_as', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='param', full_name='biomaj.download.DownloadFile.RemoteFile.param', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='http_parse', full_name='biomaj.download.DownloadFile.RemoteFile.http_parse', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='credentials', full_name='biomaj.download.DownloadFile.RemoteFile.credentials', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='matches', full_name='biomaj.download.DownloadFile.RemoteFile.matches', index=8,
number=9, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1543,
serialized_end=1855,
)
_DOWNLOADFILE_PROXY = _descriptor.Descriptor(
name='Proxy',
full_name='biomaj.download.DownloadFile.Proxy',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proxy', full_name='biomaj.download.DownloadFile.Proxy.proxy', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='proxy_auth', full_name='biomaj.download.DownloadFile.Proxy.proxy_auth', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1857,
serialized_end=1899,
)
_DOWNLOADFILE_OPTIONSENTRY = _descriptor.Descriptor(
name='OptionsEntry',
full_name='biomaj.download.DownloadFile.OptionsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='biomaj.download.DownloadFile.OptionsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='biomaj.download.DownloadFile.OptionsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1901,
serialized_end=1947,
)
_DOWNLOADFILE = _descriptor.Descriptor(
name='DownloadFile',
full_name='biomaj.download.DownloadFile',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='bank', full_name='biomaj.download.DownloadFile.bank', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='session', full_name='biomaj.download.DownloadFile.session', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='local_dir', full_name='biomaj.download.DownloadFile.local_dir', index=2,
number=3, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timeout_download', full_name='biomaj.download.DownloadFile.timeout_download', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='remote_file', full_name='biomaj.download.DownloadFile.remote_file', index=4,
number=5, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='proxy', full_name='biomaj.download.DownloadFile.proxy', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='http_method', full_name='biomaj.download.DownloadFile.http_method', index=6,
number=8, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='options', full_name='biomaj.download.DownloadFile.options', index=7,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_DOWNLOADFILE_PARAM, _DOWNLOADFILE_HTTPPARSE, _DOWNLOADFILE_REMOTEFILE, _DOWNLOADFILE_PROXY, _DOWNLOADFILE_OPTIONSENTRY, ],
enum_types=[
_DOWNLOADFILE_PROTOCOL,
_DOWNLOADFILE_HTTP_METHOD,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=703,
serialized_end=2131,
)
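# Resolve cross-references between the descriptors now that they all exist:
# nested/containing types plus message- and enum-typed fields.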
_FILE_METADATA.containing_type = _FILE
_FILE.fields_by_name['metadata'].message_type = _FILE_METADATA
_FILELIST.fields_by_name['files'].message_type = _FILE
_OPERATION_TRACE.containing_type = _OPERATION
_OPERATION.fields_by_name['type'].enum_type = _OPERATION_OPERATION
_OPERATION.fields_by_name['download'].message_type = _DOWNLOADFILE
_OPERATION.fields_by_name['process'].message_type = _PROCESS
_OPERATION.fields_by_name['trace'].message_type = _OPERATION_TRACE
_OPERATION_OPERATION.containing_type = _OPERATION
_DOWNLOADFILE_PARAM.containing_type = _DOWNLOADFILE
_DOWNLOADFILE_HTTPPARSE.containing_type = _DOWNLOADFILE
_DOWNLOADFILE_REMOTEFILE.fields_by_name['files'].message_type = _FILE
_DOWNLOADFILE_REMOTEFILE.fields_by_name['protocol'].enum_type = _DOWNLOADFILE_PROTOCOL
_DOWNLOADFILE_REMOTEFILE.fields_by_name['param'].message_type = _DOWNLOADFILE_PARAM
_DOWNLOADFILE_REMOTEFILE.fields_by_name['http_parse'].message_type = _DOWNLOADFILE_HTTPPARSE
_DOWNLOADFILE_REMOTEFILE.containing_type = _DOWNLOADFILE
_DOWNLOADFILE_PROXY.containing_type = _DOWNLOADFILE
_DOWNLOADFILE_OPTIONSENTRY.containing_type = _DOWNLOADFILE
_DOWNLOADFILE.fields_by_name['remote_file'].message_type = _DOWNLOADFILE_REMOTEFILE
_DOWNLOADFILE.fields_by_name['proxy'].message_type = _DOWNLOADFILE_PROXY
_DOWNLOADFILE.fields_by_name['http_method'].enum_type = _DOWNLOADFILE_HTTP_METHOD
_DOWNLOADFILE.fields_by_name['options'].message_type = _DOWNLOADFILE_OPTIONSENTRY
_DOWNLOADFILE_PROTOCOL.containing_type = _DOWNLOADFILE
_DOWNLOADFILE_HTTP_METHOD.containing_type = _DOWNLOADFILE
DESCRIPTOR.message_types_by_name['File'] = _FILE
DESCRIPTOR.message_types_by_name['FileList'] = _FILELIST
DESCRIPTOR.message_types_by_name['Operation'] = _OPERATION
DESCRIPTOR.message_types_by_name['Process'] = _PROCESS
DESCRIPTOR.message_types_by_name['DownloadFile'] = _DOWNLOADFILE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
File = _reflection.GeneratedProtocolMessageType('File', (_message.Message,), dict(
MetaData = _reflection.GeneratedProtocolMessageType('MetaData', (_message.Message,), dict(
DESCRIPTOR = _FILE_METADATA,
__module__ = 'downmessage_pb2'
# @@protoc_insertion_point(class_scope:biomaj.download.File.MetaData)
))
,
DESCRIPTOR = _FILE,
__module__ = 'downmessage_pb2'
# @@protoc_insertion_point(class_scope:biomaj.download.File)
))
_sym_db.RegisterMessage(File)
_sym_db.RegisterMessage(File.MetaData)
FileList = _reflection.GeneratedProtocolMessageType('FileList', (_message.Message,), dict(
DESCRIPTOR = _FILELIST,
__module__ = 'downmessage_pb2'
# @@protoc_insertion_point(class_scope:biomaj.download.FileList)
))
_sym_db.RegisterMessage(FileList)
Operation = _reflection.GeneratedProtocolMessageType('Operation', (_message.Message,), dict(
Trace = _reflection.GeneratedProtocolMessageType('Trace', (_message.Message,), dict(
DESCRIPTOR = _OPERATION_TRACE,
__module__ = 'downmessage_pb2'
# @@protoc_insertion_point(class_scope:biomaj.download.Operation.Trace)
))
,
DESCRIPTOR = _OPERATION,
__module__ = 'downmessage_pb2'
# @@protoc_insertion_point(class_scope:biomaj.download.Operation)
))
_sym_db.RegisterMessage(Operation)
_sym_db.RegisterMessage(Operation.Trace)
Process = _reflection.GeneratedProtocolMessageType('Process', (_message.Message,), dict(
DESCRIPTOR = _PROCESS,
__module__ = 'downmessage_pb2'
# @@protoc_insertion_point(class_scope:biomaj.download.Process)
))
_sym_db.RegisterMessage(Process)
DownloadFile = _reflection.GeneratedProtocolMessageType('DownloadFile', (_message.Message,), dict(
Param = _reflection.GeneratedProtocolMessageType('Param', (_message.Message,), dict(
DESCRIPTOR = _DOWNLOADFILE_PARAM,
__module__ = 'downmessage_pb2'
# @@protoc_insertion_point(class_scope:biomaj.download.DownloadFile.Param)
))
,
HttpParse = _reflection.GeneratedProtocolMessageType('HttpParse', (_message.Message,), dict(
DESCRIPTOR = _DOWNLOADFILE_HTTPPARSE,
__module__ = 'downmessage_pb2'
# @@protoc_insertion_point(class_scope:biomaj.download.DownloadFile.HttpParse)
))
,
RemoteFile = _reflection.GeneratedProtocolMessageType('RemoteFile', (_message.Message,), dict(
DESCRIPTOR = _DOWNLOADFILE_REMOTEFILE,
__module__ = 'downmessage_pb2'
# @@protoc_insertion_point(class_scope:biomaj.download.DownloadFile.RemoteFile)
))
,
Proxy = _reflection.GeneratedProtocolMessageType('Proxy', (_message.Message,), dict(
DESCRIPTOR = _DOWNLOADFILE_PROXY,
__module__ = 'downmessage_pb2'
# @@protoc_insertion_point(class_scope:biomaj.download.DownloadFile.Proxy)
))
,
OptionsEntry = _reflection.GeneratedProtocolMessageType('OptionsEntry', (_message.Message,), dict(
DESCRIPTOR = _DOWNLOADFILE_OPTIONSENTRY,
__module__ = 'downmessage_pb2'
# @@protoc_insertion_point(class_scope:biomaj.download.DownloadFile.OptionsEntry)
))
,
DESCRIPTOR = _DOWNLOADFILE,
__module__ = 'downmessage_pb2'
# @@protoc_insertion_point(class_scope:biomaj.download.DownloadFile)
))
_sym_db.RegisterMessage(DownloadFile)
_sym_db.RegisterMessage(DownloadFile.Param)
_sym_db.RegisterMessage(DownloadFile.HttpParse)
_sym_db.RegisterMessage(DownloadFile.RemoteFile)
_sym_db.RegisterMessage(DownloadFile.Proxy)
_sym_db.RegisterMessage(DownloadFile.OptionsEntry)
_DOWNLOADFILE_OPTIONSENTRY._options = None
# @@protoc_insertion_point(module_scope)
|
nibanks/openthread | refs/heads/master | tests/scripts/thread-cert/Cert_5_6_05_NetworkDataRegisterAfterAttachRouter.py | 5 | #!/usr/bin/python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import time
import unittest
import node
LEADER = 1
ROUTER = 2
ED1 = 3
SED1 = 4
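# Topology under test (links are defined by the whitelists in setUp):
#
#   LEADER
#      |
#   ROUTER
#    /    \
#  ED1    SED1 (sleepy end device)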
class Cert_5_6_5_NetworkDataRegisterAfterAttachRouter(unittest.TestCase):
def setUp(self):
self.nodes = {}
for i in range(1,5):
self.nodes[i] = node.Node(i)
self.nodes[LEADER].set_panid(0xface)
self.nodes[LEADER].set_mode('rsdn')
self.nodes[LEADER].add_whitelist(self.nodes[ROUTER].get_addr64())
self.nodes[LEADER].enable_whitelist()
self.nodes[ROUTER].set_panid(0xface)
self.nodes[ROUTER].set_mode('rsdn')
self.nodes[ROUTER].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[ROUTER].add_whitelist(self.nodes[ED1].get_addr64())
self.nodes[ROUTER].add_whitelist(self.nodes[SED1].get_addr64())
self.nodes[ROUTER].enable_whitelist()
self.nodes[ROUTER].set_router_selection_jitter(1)
self.nodes[ED1].set_panid(0xface)
self.nodes[ED1].set_mode('rsn')
self.nodes[ED1].add_whitelist(self.nodes[ROUTER].get_addr64())
self.nodes[ED1].enable_whitelist()
self.nodes[SED1].set_panid(0xface)
self.nodes[SED1].set_mode('s')
self.nodes[SED1].add_whitelist(self.nodes[ROUTER].get_addr64())
self.nodes[SED1].enable_whitelist()
self.nodes[SED1].set_timeout(3)
def tearDown(self):
for node in list(self.nodes.values()):
node.stop()
del self.nodes
def test(self):
self.nodes[LEADER].start()
self.nodes[LEADER].set_state('leader')
self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
self.nodes[ROUTER].start()
time.sleep(5)
self.assertEqual(self.nodes[ROUTER].get_state(), 'router')
self.nodes[ED1].start()
time.sleep(5)
self.assertEqual(self.nodes[ED1].get_state(), 'child')
self.nodes[SED1].start()
time.sleep(5)
self.assertEqual(self.nodes[SED1].get_state(), 'child')
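        # Prefix flags (OpenThread CLI): p=preferred, a=SLAAC, r=default
        # route, o=on-mesh, s=stable. The second prefix omits 's', so the
        # sleepy end device, which requests only stable network data, must
        # not configure an address from it.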
self.nodes[ROUTER].add_prefix('2001:2:0:1::/64', 'paros')
self.nodes[ROUTER].add_prefix('2001:2:0:2::/64', 'paro')
self.nodes[ROUTER].register_netdata()
time.sleep(10)
addrs = self.nodes[ED1].get_addrs()
self.assertTrue(any('2001:2:0:1' in addr[0:10] for addr in addrs))
self.assertTrue(any('2001:2:0:2' in addr[0:10] for addr in addrs))
for addr in addrs:
if addr[0:3] == '200':
self.assertTrue(self.nodes[LEADER].ping(addr))
addrs = self.nodes[SED1].get_addrs()
self.assertTrue(any('2001:2:0:1' in addr[0:10] for addr in addrs))
self.assertFalse(any('2001:2:0:2' in addr[0:10] for addr in addrs))
for addr in addrs:
if addr[0:3] == '200':
self.assertTrue(self.nodes[LEADER].ping(addr))
self.nodes[ROUTER].add_prefix('2001:2:0:3::/64', 'pacs')
self.nodes[ROUTER].register_netdata()
time.sleep(10)
addrs = self.nodes[ED1].get_addrs()
self.assertTrue(any('2001:2:0:1' in addr[0:10] for addr in addrs))
self.assertTrue(any('2001:2:0:2' in addr[0:10] for addr in addrs))
self.assertTrue(any('2001:2:0:3' in addr[0:10] for addr in addrs))
for addr in addrs:
if addr[0:3] == '200':
self.assertTrue(self.nodes[LEADER].ping(addr))
addrs = self.nodes[SED1].get_addrs()
self.assertTrue(any('2001:2:0:1' in addr[0:10] for addr in addrs))
self.assertFalse(any('2001:2:0:2' in addr[0:10] for addr in addrs))
self.assertTrue(any('2001:2:0:3' in addr[0:10] for addr in addrs))
for addr in addrs:
if addr[0:3] == '200':
self.assertTrue(self.nodes[LEADER].ping(addr))
if __name__ == '__main__':
unittest.main()
|
fperez/sympy | refs/heads/master | sympy/core/tests/test_var.py | 3 | # Tests for var are in their own file, because var pollutes global namespace.
from sympy import Symbol, var, raises
# make z1 with call-depth = 1
def make_z1():
var("z1")
# make z2 with call-depth = 2
def __make_z2():
var("z2")
def make_z2():
__make_z2()
def test_var():
var("a")
assert a == Symbol("a")
var("b bb cc zz _x")
assert b == Symbol("b")
assert bb == Symbol("bb")
assert cc == Symbol("cc")
assert zz == Symbol("zz")
assert _x == Symbol("_x")
v = var(['d','e','fg'])
assert d == Symbol('d')
assert e == Symbol('e')
assert fg == Symbol('fg')
# check return value
assert v == (d, e, fg)
# see if var() really injects into global namespace
raises(NameError, "z1")
make_z1()
assert z1 == Symbol("z1")
raises(NameError, "z2")
make_z2()
assert z2 == Symbol("z2")
def test_var_return():
v1 = var('')
v2 = var('q')
v3 = var('q p')
assert v1 == None
assert v2 == Symbol('q')
assert v3 == (Symbol('q'), Symbol('p'))
def test_var_accepts_comma():
v1 = var('x y z')
v2 = var('x,y,z')
v3 = var('x,y z')
assert v1 == v2
assert v1 == v3
def test_var_keywords():
var('x y', real=True)
assert x.is_real and y.is_real
|
cyrus-/typy | refs/heads/typy | tests/test_gpce16.py | 1 | """Testing the examples from the GPCE 2016 paper.
To run:
$ py.test test_gpce16.py
"""
import pytest
import ast
import typy
from typy.std import component, unit, record, string, py, fn, finsum, tpl
class TestGPCEExamples:
@pytest.fixture
def Listing1(self):
# simplified to use string rather than string_in for now
# TODO string_in
@component
def Listing1():
Account [type] = record[
name : string,
account_num : string,
memo : py
]
test_acct [: Account] = {
name: "Harry Q. Bovik",
account_num: "00-12345678",
memo: { }
}
return Listing1
def test_Listing1(self, Listing1):
c = Listing1
assert isinstance(c, typy.Component)
# parsing
assert isinstance(c._members, tuple)
assert len(c._members) == 2
assert isinstance(c._members[0], typy._components.TypeMember)
assert c._members[0].id == "Account"
assert isinstance(c._members[0].uty_expr, typy._ty_exprs.UCanonicalTy)
assert isinstance(c._members[0].uty_expr.fragment_ast, ast.Name)
assert isinstance(c._members[0].uty_expr.idx_ast, ast.ExtSlice)
assert isinstance(c._members[1], typy._components.ValueMember)
assert c._members[1].id == "test_acct"
assert isinstance(c._members[1].uty, typy._ty_exprs.UName)
assert c._members[1].uty.id == "Account"
# checking
assert isinstance(c._members[1].ty, typy._ty_exprs.CanonicalTy)
assert c._members[1].ty.fragment == record
assert isinstance(c._members[0].ty.idx, dict)
assert c._members[1].ty.idx["name"].fragment == string
assert c._members[1].ty.idx["account_num"].fragment == string
assert c._members[1].ty.idx["memo"].fragment == py
assert isinstance(c._members[1].ty, typy._ty_exprs.CanonicalTy)
assert c._members[1].ty.fragment == record
# translation and evaluation
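        # (record values are flattened into a tuple with fields in
        # alphabetical order: account_num, memo, name)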
assert c._module.test_acct == ("00-12345678",
{ }, "Harry Q. Bovik")
@pytest.fixture
def Listing4(self, Listing1):
@component
def Listing4():
@fn
def hello(account : Listing1.Account):
"""Computes a string greeting."""
name = account.name
"Hello, " + name
hello_test = hello(Listing1.test_acct)
# TODO print
return Listing4
def test_Listing4(self, Listing4):
c = Listing4
assert isinstance(c, typy.Component)
assert c._module.hello(("00-12345678",
{ }, "Harry Q. Bovik")) == "Hello, Harry Q. Bovik"
assert c._module.hello_test == "Hello, Harry Q. Bovik"
@pytest.fixture
def Listing7(self):
@component
def Listing7():
tree(+a) [type] = finsum[
Empty,
Node(tree(+a), tree(+a)),
Leaf(+a)
]
@fn
def map(f : fn[+a, +b],
t : tree(+a)) -> tree(+b):
[t].match
with Empty: Empty
with Node(left, right):
Node(map(f, left), map(f, right))
with Leaf(x): Leaf(f(x))
return Listing7
# TODO type functions
# TODO finsum init_idx
# TODO recursive types
# TODO polymorphic functions
# TODO pattern matching
# TODO name literals
# TODO recursive functions
@pytest.fixture
def Listing9(self, Listing1):
@component
def Listing9():
Transaction [type] = proto[
amount : decimal,
incr : fn[Transaction, unit],
proto : Listing1.Account
]
test_trans [: Transaction]
def _():
amount = 36.50
def incr(self): self.amount += 1
                proto = Listing1.test_acct
test_trans.incr() # self passed automatically
print(test_trans.name)
return Listing9
# TODO decimal intro
# TODO proto intro
# TODO proto dispatch
# TODO proto attribute access
@pytest.fixture
def Listing10(self):
@component
def Listing10():
# TODO device selection code
# make numpy array + send to device
x [: array[f64]] = [1, 2, 3, 4]
d_x = to_device(x)
# define a typed data-parallel OpenCL kernel
@kernel
def add5(x : buffer[f64]):
gid = get_global_id(0) # OpenCL primitive
x[gid] = x[gid] + 5
# spawn one device thread per element and run
add5(d_x, global_size=d_x.length)
y = d_x.from_device() # retrieve from device
print(y.to_string()) # prints [6. 7, 8, 9]
return Listing10
# TODO imports in translation
# TODO numpy number logic
# TODO numpy array logic
# TODO opencl device selection API
# TODO opencl device transfer API
# TODO opencl kernels
# TODO opencl get_global_id primitive
# TODO opencl buffer lookup
# TODO opencl add
# TODO opencl buffer assignment
# TODO opencl kernel call
# TODO numpy to_string
|
quinot/ansible | refs/heads/devel | lib/ansible/modules/storage/infinidat/infini_pool.py | 27 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Gregory Shulov (gregory.shulov@gmail.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: infini_pool
version_added: 2.3
short_description: Create, Delete and Modify Pools on Infinibox
description:
    - This module creates, deletes, or modifies pools on an Infinibox.
author: Gregory Shulov (@GR360RY)
options:
name:
description:
- Pool Name
required: true
state:
description:
- Creates/Modifies Pool when present or removes when absent
required: false
default: present
choices: [ "present", "absent" ]
size:
description:
- Pool Physical Capacity in MB, GB or TB units.
If pool size is not set on pool creation, size will be equal to 1TB.
See examples.
required: false
vsize:
description:
- Pool Virtual Capacity in MB, GB or TB units.
If pool vsize is not set on pool creation, Virtual Capacity will be equal to Physical Capacity.
See examples.
required: false
ssd_cache:
description:
- Enable/Disable SSD Cache on Pool
required: false
default: yes
choices: [ "yes", "no" ]
notes:
- Infinibox Admin level access is required for pool modifications
extends_documentation_fragment:
- infinibox
requirements:
- capacity
'''
EXAMPLES = '''
- name: Make sure pool foo exists. Set pool physical capacity to 10TB
infini_pool:
name: foo
size: 10TB
vsize: 10TB
user: admin
password: secret
system: ibox001
- name: Disable SSD Cache on pool
infini_pool:
name: foo
ssd_cache: no
user: admin
password: secret
system: ibox001
'''
RETURN = '''
'''
try:
from capacity import KiB, Capacity
HAS_CAPACITY = True
except ImportError:
HAS_CAPACITY = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.infinibox import HAS_INFINISDK, api_wrapper, get_system, infinibox_argument_spec
@api_wrapper
def get_pool(module, system):
"""Return Pool on None"""
try:
return system.pools.get(name=module.params['name'])
    except Exception:
return None
@api_wrapper
def create_pool(module, system):
"""Create Pool"""
name = module.params['name']
size = module.params['size']
vsize = module.params['vsize']
ssd_cache = module.params['ssd_cache']
if not module.check_mode:
if not size and not vsize:
pool = system.pools.create(name=name, physical_capacity=Capacity('1TB'), virtual_capacity=Capacity('1TB'))
elif size and not vsize:
pool = system.pools.create(name=name, physical_capacity=Capacity(size), virtual_capacity=Capacity(size))
elif not size and vsize:
pool = system.pools.create(name=name, physical_capacity=Capacity('1TB'), virtual_capacity=Capacity(vsize))
else:
pool = system.pools.create(name=name, physical_capacity=Capacity(size), virtual_capacity=Capacity(vsize))
    # Default value of ssd_cache is True. Disable SSD caching if False.
if not ssd_cache:
pool.update_ssd_enabled(ssd_cache)
module.exit_json(changed=True)
@api_wrapper
def update_pool(module, system, pool):
"""Update Pool"""
changed = False
size = module.params['size']
vsize = module.params['vsize']
ssd_cache = module.params['ssd_cache']
    # Round up the requested capacities to the Infinibox allocation
    # granularity (6 * 64 KiB) so they compare cleanly with the
    # capacities reported by the system.
if size:
physical_capacity = Capacity(size).roundup(6 * 64 * KiB)
if pool.get_physical_capacity() != physical_capacity:
if not module.check_mode:
pool.update_physical_capacity(physical_capacity)
changed = True
if vsize:
virtual_capacity = Capacity(vsize).roundup(6 * 64 * KiB)
if pool.get_virtual_capacity() != virtual_capacity:
if not module.check_mode:
pool.update_virtual_capacity(virtual_capacity)
changed = True
if pool.get_ssd_enabled() != ssd_cache:
if not module.check_mode:
pool.update_ssd_enabled(ssd_cache)
changed = True
module.exit_json(changed=changed)
@api_wrapper
def delete_pool(module, pool):
"""Delete Pool"""
if not module.check_mode:
pool.delete()
module.exit_json(changed=True)
def main():
argument_spec = infinibox_argument_spec()
argument_spec.update(
dict(
name=dict(required=True),
state=dict(default='present', choices=['present', 'absent']),
size=dict(),
vsize=dict(),
ssd_cache=dict(type='bool', default=True)
)
)
module = AnsibleModule(argument_spec, supports_check_mode=True)
if not HAS_INFINISDK:
module.fail_json(msg='infinisdk is required for this module')
if not HAS_CAPACITY:
module.fail_json(msg='The capacity python library is required for this module')
if module.params['size']:
try:
Capacity(module.params['size'])
        except Exception:
module.fail_json(msg='size (Physical Capacity) should be defined in MB, GB, TB or PB units')
if module.params['vsize']:
try:
Capacity(module.params['vsize'])
        except Exception:
module.fail_json(msg='vsize (Virtual Capacity) should be defined in MB, GB, TB or PB units')
state = module.params['state']
system = get_system(module)
pool = get_pool(module, system)
if state == 'present' and not pool:
create_pool(module, system)
elif state == 'present' and pool:
update_pool(module, system, pool)
elif state == 'absent' and pool:
delete_pool(module, pool)
elif state == 'absent' and not pool:
module.exit_json(changed=False)
if __name__ == '__main__':
main()
|
IITBinterns13/edx-platform-dev | refs/heads/master | cms/djangoapps/contentstore/tests/test_item.py | 2 | from contentstore.tests.test_course_settings import CourseTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from django.core.urlresolvers import reverse
class DeleteItem(CourseTestCase):
def setUp(self):
""" Creates the test course with a static page in it. """
super(DeleteItem, self).setUp()
self.course = CourseFactory.create(org='mitX', number='333', display_name='Dummy Course')
def testDeleteStaticPage(self):
# Add static tab
data = {
'parent_location': 'i4x://mitX/333/course/Dummy_Course',
'template': 'i4x://edx/templates/static_tab/Empty'
}
resp = self.client.post(reverse('clone_item'), data)
self.assertEqual(resp.status_code, 200)
# Now delete it. There was a bug that the delete was failing (static tabs do not exist in draft modulestore).
resp = self.client.post(reverse('delete_item'), resp.content, "application/json")
self.assertEqual(resp.status_code, 200)
|
duqiao/django | refs/heads/master | tests/validators/tests.py | 163 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import io
import os
import re
import types
from datetime import datetime, timedelta
from unittest import TestCase
from django.core.exceptions import ValidationError
from django.core.validators import (
BaseValidator, EmailValidator, MaxLengthValidator, MaxValueValidator,
MinLengthValidator, MinValueValidator, RegexValidator, URLValidator,
int_list_validator, validate_comma_separated_integer_list, validate_email,
validate_integer, validate_ipv4_address, validate_ipv6_address,
validate_ipv46_address, validate_slug, validate_unicode_slug,
)
from django.test import SimpleTestCase
from django.test.utils import str_prefix
from django.utils._os import upath
NOW = datetime.now()
EXTENDED_SCHEMES = ['http', 'https', 'ftp', 'ftps', 'git', 'file']
TEST_DATA = [
# (validator, value, expected),
(validate_integer, '42', None),
(validate_integer, '-42', None),
(validate_integer, -42, None),
(validate_integer, -42.5, ValidationError),
(validate_integer, None, ValidationError),
(validate_integer, 'a', ValidationError),
(validate_integer, '\n42', ValidationError),
(validate_integer, '42\n', ValidationError),
(validate_email, 'email@here.com', None),
(validate_email, 'weirder-email@here.and.there.com', None),
(validate_email, 'email@[127.0.0.1]', None),
(validate_email, 'email@[2001:dB8::1]', None),
(validate_email, 'email@[2001:dB8:0:0:0:0:0:1]', None),
(validate_email, 'email@[::fffF:127.0.0.1]', None),
(validate_email, 'example@valid-----hyphens.com', None),
(validate_email, 'example@valid-with-hyphens.com', None),
(validate_email, 'test@domain.with.idn.tld.เคเคฆเคพเคนเคฐเคฃ.เคชเคฐเฅเคเฅเคทเคพ', None),
(validate_email, 'email@localhost', None),
(EmailValidator(whitelist=['localdomain']), 'email@localdomain', None),
(validate_email, '"test@test"@example.com', None),
(validate_email, 'example@atm.%s' % ('a' * 63), None),
(validate_email, 'example@%s.atm' % ('a' * 63), None),
(validate_email, 'example@%s.%s.atm' % ('a' * 63, 'b' * 10), None),
(validate_email, 'example@atm.%s' % ('a' * 64), ValidationError),
(validate_email, 'example@%s.atm.%s' % ('b' * 64, 'a' * 63), ValidationError),
(validate_email, None, ValidationError),
(validate_email, '', ValidationError),
(validate_email, 'abc', ValidationError),
(validate_email, 'abc@', ValidationError),
(validate_email, 'abc@bar', ValidationError),
(validate_email, 'a @x.cz', ValidationError),
(validate_email, 'abc@.com', ValidationError),
(validate_email, 'something@@somewhere.com', ValidationError),
(validate_email, 'email@127.0.0.1', ValidationError),
(validate_email, 'email@[127.0.0.256]', ValidationError),
(validate_email, 'email@[2001:db8::12345]', ValidationError),
(validate_email, 'email@[2001:db8:0:0:0:0:1]', ValidationError),
(validate_email, 'email@[::ffff:127.0.0.256]', ValidationError),
(validate_email, 'example@invalid-.com', ValidationError),
(validate_email, 'example@-invalid.com', ValidationError),
(validate_email, 'example@invalid.com-', ValidationError),
(validate_email, 'example@inv-.alid-.com', ValidationError),
(validate_email, 'example@inv-.-alid.com', ValidationError),
(validate_email, 'test@example.com\n\n<script src="x.js">', ValidationError),
# Quoted-string format (CR not allowed)
(validate_email, '"\\\011"@here.com', None),
(validate_email, '"\\\012"@here.com', ValidationError),
(validate_email, 'trailingdot@shouldfail.com.', ValidationError),
# Max length of domain name labels is 63 characters per RFC 1034.
(validate_email, 'a@%s.us' % ('a' * 63), None),
(validate_email, 'a@%s.us' % ('a' * 64), ValidationError),
# Trailing newlines in username or domain not allowed
(validate_email, 'a@b.com\n', ValidationError),
(validate_email, 'a\n@b.com', ValidationError),
(validate_email, '"test@test"\n@example.com', ValidationError),
(validate_email, 'a@[127.0.0.1]\n', ValidationError),
(validate_slug, 'slug-ok', None),
(validate_slug, 'longer-slug-still-ok', None),
(validate_slug, '--------', None),
(validate_slug, 'nohyphensoranything', None),
(validate_slug, 'a', None),
(validate_slug, '1', None),
(validate_slug, 'a1', None),
(validate_slug, '', ValidationError),
(validate_slug, ' text ', ValidationError),
(validate_slug, ' ', ValidationError),
(validate_slug, 'some@mail.com', ValidationError),
(validate_slug, 'ไฝ ๅฅฝ', ValidationError),
(validate_slug, 'ไฝ ๅฅฝ', ValidationError),
(validate_slug, '\n', ValidationError),
(validate_slug, 'trailing-newline\n', ValidationError),
(validate_unicode_slug, 'slug-ok', None),
(validate_unicode_slug, 'longer-slug-still-ok', None),
(validate_unicode_slug, '--------', None),
(validate_unicode_slug, 'nohyphensoranything', None),
(validate_unicode_slug, 'a', None),
(validate_unicode_slug, '1', None),
(validate_unicode_slug, 'a1', None),
(validate_unicode_slug, 'ไฝ ๅฅฝ', None),
(validate_unicode_slug, '', ValidationError),
(validate_unicode_slug, ' text ', ValidationError),
(validate_unicode_slug, ' ', ValidationError),
(validate_unicode_slug, 'some@mail.com', ValidationError),
(validate_unicode_slug, '\n', ValidationError),
(validate_unicode_slug, 'ไฝ ๅฅฝ', ValidationError),
(validate_unicode_slug, 'trailing-newline\n', ValidationError),
(validate_ipv4_address, '1.1.1.1', None),
(validate_ipv4_address, '255.0.0.0', None),
(validate_ipv4_address, '0.0.0.0', None),
(validate_ipv4_address, '256.1.1.1', ValidationError),
(validate_ipv4_address, '25.1.1.', ValidationError),
(validate_ipv4_address, '25,1,1,1', ValidationError),
(validate_ipv4_address, '25.1 .1.1', ValidationError),
(validate_ipv4_address, '1.1.1.1\n', ValidationError),
# validate_ipv6_address uses django.utils.ipv6, which
# is tested in much greater detail in its own testcase
(validate_ipv6_address, 'fe80::1', None),
(validate_ipv6_address, '::1', None),
(validate_ipv6_address, '1:2:3:4:5:6:7:8', None),
(validate_ipv6_address, '1:2', ValidationError),
(validate_ipv6_address, '::zzz', ValidationError),
(validate_ipv6_address, '12345::', ValidationError),
(validate_ipv46_address, '1.1.1.1', None),
(validate_ipv46_address, '255.0.0.0', None),
(validate_ipv46_address, '0.0.0.0', None),
(validate_ipv46_address, 'fe80::1', None),
(validate_ipv46_address, '::1', None),
(validate_ipv46_address, '1:2:3:4:5:6:7:8', None),
(validate_ipv46_address, '256.1.1.1', ValidationError),
(validate_ipv46_address, '25.1.1.', ValidationError),
(validate_ipv46_address, '25,1,1,1', ValidationError),
(validate_ipv46_address, '25.1 .1.1', ValidationError),
(validate_ipv46_address, '1:2', ValidationError),
(validate_ipv46_address, '::zzz', ValidationError),
(validate_ipv46_address, '12345::', ValidationError),
(validate_comma_separated_integer_list, '1', None),
(validate_comma_separated_integer_list, '12', None),
(validate_comma_separated_integer_list, '1,2', None),
(validate_comma_separated_integer_list, '1,2,3', None),
(validate_comma_separated_integer_list, '10,32', None),
(validate_comma_separated_integer_list, '', ValidationError),
(validate_comma_separated_integer_list, 'a', ValidationError),
(validate_comma_separated_integer_list, 'a,b,c', ValidationError),
(validate_comma_separated_integer_list, '1, 2, 3', ValidationError),
(validate_comma_separated_integer_list, ',', ValidationError),
(validate_comma_separated_integer_list, '1,2,3,', ValidationError),
(validate_comma_separated_integer_list, '1,2,', ValidationError),
(validate_comma_separated_integer_list, ',1', ValidationError),
(validate_comma_separated_integer_list, '1,,2', ValidationError),
(int_list_validator(sep='.'), '1.2.3', None),
(int_list_validator(sep='.'), '1,2,3', ValidationError),
(int_list_validator(sep='.'), '1.2.3\n', ValidationError),
(MaxValueValidator(10), 10, None),
(MaxValueValidator(10), -10, None),
(MaxValueValidator(10), 0, None),
(MaxValueValidator(NOW), NOW, None),
(MaxValueValidator(NOW), NOW - timedelta(days=1), None),
(MaxValueValidator(0), 1, ValidationError),
(MaxValueValidator(NOW), NOW + timedelta(days=1), ValidationError),
(MinValueValidator(-10), -10, None),
(MinValueValidator(-10), 10, None),
(MinValueValidator(-10), 0, None),
(MinValueValidator(NOW), NOW, None),
(MinValueValidator(NOW), NOW + timedelta(days=1), None),
(MinValueValidator(0), -1, ValidationError),
(MinValueValidator(NOW), NOW - timedelta(days=1), ValidationError),
(MaxLengthValidator(10), '', None),
(MaxLengthValidator(10), 10 * 'x', None),
(MaxLengthValidator(10), 15 * 'x', ValidationError),
(MinLengthValidator(10), 15 * 'x', None),
(MinLengthValidator(10), 10 * 'x', None),
(MinLengthValidator(10), '', ValidationError),
(URLValidator(EXTENDED_SCHEMES), 'file://localhost/path', None),
(URLValidator(EXTENDED_SCHEMES), 'git://example.com/', None),
(URLValidator(EXTENDED_SCHEMES), 'git://-invalid.com', ValidationError),
# Trailing newlines not accepted
(URLValidator(), 'http://www.djangoproject.com/\n', ValidationError),
(URLValidator(), 'http://[::ffff:192.9.5.5]\n', ValidationError),
# Trailing junk does not take forever to reject
(URLValidator(), 'http://www.asdasdasdasdsadfm.com.br ', ValidationError),
(URLValidator(), 'http://www.asdasdasdasdsadfm.com.br z', ValidationError),
(BaseValidator(True), True, None),
(BaseValidator(True), False, ValidationError),
(RegexValidator(), '', None),
(RegexValidator(), 'x1x2', None),
(RegexValidator('[0-9]+'), 'xxxxxx', ValidationError),
(RegexValidator('[0-9]+'), '1234', None),
(RegexValidator(re.compile('[0-9]+')), '1234', None),
(RegexValidator('.*'), '', None),
(RegexValidator(re.compile('.*')), '', None),
(RegexValidator('.*'), 'xxxxx', None),
(RegexValidator('x'), 'y', ValidationError),
(RegexValidator(re.compile('x')), 'y', ValidationError),
(RegexValidator('x', inverse_match=True), 'y', None),
(RegexValidator(re.compile('x'), inverse_match=True), 'y', None),
(RegexValidator('x', inverse_match=True), 'x', ValidationError),
(RegexValidator(re.compile('x'), inverse_match=True), 'x', ValidationError),
(RegexValidator('x', flags=re.IGNORECASE), 'y', ValidationError),
(RegexValidator('a'), 'A', ValidationError),
(RegexValidator('a', flags=re.IGNORECASE), 'A', None),
]
def create_path(filename):
return os.path.abspath(os.path.join(os.path.dirname(upath(__file__)), filename))
# Add valid and invalid URL tests.
# This only tests the validator without extended schemes.
with io.open(create_path('valid_urls.txt'), encoding='utf8') as f:
for url in f:
TEST_DATA.append((URLValidator(), url.strip(), None))
with io.open(create_path('invalid_urls.txt'), encoding='utf8') as f:
for url in f:
TEST_DATA.append((URLValidator(), url.strip(), ValidationError))
def create_simple_test_method(validator, expected, value, num):
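    '''
    Builds a single test method for one (validator, value, expected) row;
    expected is either an exception class the validator must raise or the
    value the validator must return.
    '''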
if expected is not None and issubclass(expected, Exception):
test_mask = 'test_%s_raises_error_%d'
def test_func(self):
# assertRaises not used, so as to be able to produce an error message
# containing the tested value
try:
validator(value)
except expected:
pass
else:
self.fail("%s not raised when validating '%s'" % (
expected.__name__, value))
else:
test_mask = 'test_%s_%d'
def test_func(self):
try:
self.assertEqual(expected, validator(value))
except ValidationError as e:
self.fail("Validation of '%s' failed. Error message was: %s" % (
value, str(e)))
if isinstance(validator, types.FunctionType):
val_name = validator.__name__
else:
val_name = validator.__class__.__name__
test_name = test_mask % (val_name, num)
return test_name, test_func
# Dynamically assemble a test class with the contents of TEST_DATA
class TestSimpleValidators(SimpleTestCase):
def test_single_message(self):
v = ValidationError('Not Valid')
self.assertEqual(str(v), str_prefix("[%(_)s'Not Valid']"))
self.assertEqual(repr(v), str_prefix("ValidationError([%(_)s'Not Valid'])"))
def test_message_list(self):
v = ValidationError(['First Problem', 'Second Problem'])
self.assertEqual(str(v), str_prefix("[%(_)s'First Problem', %(_)s'Second Problem']"))
self.assertEqual(repr(v), str_prefix("ValidationError([%(_)s'First Problem', %(_)s'Second Problem'])"))
def test_message_dict(self):
v = ValidationError({'first': ['First Problem']})
self.assertEqual(str(v), str_prefix("{%(_)s'first': [%(_)s'First Problem']}"))
self.assertEqual(repr(v), str_prefix("ValidationError({%(_)s'first': [%(_)s'First Problem']})"))
def test_regex_validator_flags(self):
try:
RegexValidator(re.compile('a'), flags=re.IGNORECASE)
except TypeError:
pass
else:
self.fail("TypeError not raised when flags and pre-compiled regex in RegexValidator")
def test_max_length_validator_message(self):
v = MaxLengthValidator(16, message='"%(value)s" has more than %(limit_value)d characters.')
with self.assertRaisesMessage(ValidationError, '"djangoproject.com" has more than 16 characters.'):
v('djangoproject.com')
test_counter = 0
for validator, value, expected in TEST_DATA:
name, method = create_simple_test_method(validator, expected, value, test_counter)
setattr(TestSimpleValidators, name, method)
test_counter += 1
class TestValidatorEquality(TestCase):
"""
Tests that validators have valid equality operators (#21638)
"""
def test_regex_equality(self):
self.assertEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
)
self.assertNotEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
RegexValidator(r'^(?:[0-9\.\-]*)://'),
)
self.assertEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
)
self.assertNotEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh", "invalid"),
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
)
self.assertNotEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
)
self.assertNotEqual(
RegexValidator('', flags=re.IGNORECASE),
RegexValidator(''),
)
self.assertNotEqual(
RegexValidator(''),
RegexValidator('', inverse_match=True),
)
def test_regex_equality_nocache(self):
pattern = r'^(?:[a-z0-9\.\-]*)://'
left = RegexValidator(pattern)
re.purge()
right = RegexValidator(pattern)
self.assertEqual(
left,
right,
)
def test_regex_equality_blank(self):
self.assertEqual(
RegexValidator(),
RegexValidator(),
)
def test_email_equality(self):
self.assertEqual(
EmailValidator(),
EmailValidator(),
)
self.assertNotEqual(
EmailValidator(message="BAD EMAIL"),
EmailValidator(),
)
self.assertEqual(
EmailValidator(message="BAD EMAIL", code="bad"),
EmailValidator(message="BAD EMAIL", code="bad"),
)
def test_basic_equality(self):
self.assertEqual(
MaxValueValidator(44),
MaxValueValidator(44),
)
self.assertNotEqual(
MaxValueValidator(44),
MinValueValidator(44),
)
self.assertNotEqual(
MinValueValidator(45),
MinValueValidator(11),
)
|
dtrip/weevely3 | refs/heads/master | tests/test_file_upload2web.py | 2 | from testfixtures import log_capture
from tests.base_test import BaseTest
from tests import config
from core.sessions import SessionURL
from core import modules
import utils
from core import messages
import subprocess
import tempfile
import os
def setUpModule():
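    # Build a tree of alternating world-writable (0777) and read-only
    # (0555) folders under the web root; the tests below use it to
    # exercise upload2web's search for the first writable destination.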
subprocess.check_output("""
BASE_FOLDER="{config.base_folder}/test_file_upload2web/"
rm -rf "$BASE_FOLDER"
mkdir -p "$BASE_FOLDER/0777/0555/0777/0555"
chown www-data: -R "$BASE_FOLDER/"
chmod 0777 "$BASE_FOLDER/0777"
chmod 0777 "$BASE_FOLDER/0777/0555/0777/"
chmod 0555 "$BASE_FOLDER/0777/0555"
chmod 0555 "$BASE_FOLDER/0777/0555/0777/0555"
""".format(
config = config
), shell=True)
class UploadWeb(BaseTest):
def setUp(self):
self.session = SessionURL(
self.url,
self.password,
volatile = True
)
modules.load_modules(self.session)
# Create the folder tree
self.folders_rel = [
'test_file_upload2web/0777/',
'test_file_upload2web/0777/0555/',
'test_file_upload2web/0777/0555/0777/',
'test_file_upload2web/0777/0555/0777/0555'
]
self.run_argv = modules.loaded['file_upload2web'].run_argv
def _get_path_url(self, folder_deepness, filename):
rurl = os.path.sep.join([
config.base_url.rstrip('/'),
self.folders_rel[folder_deepness].strip('/'),
filename.lstrip('/')]
)
rpath = os.path.sep.join([
config.base_folder.rstrip('/'),
self.folders_rel[folder_deepness].strip('/'),
filename.lstrip('/')]
)
return rpath, rurl
def test_file_uploadweb(self):
# Upload lfile with a specific path
temp_file = tempfile.NamedTemporaryFile()
rpath, rurl = self._get_path_url(0, 'f1')
self.assertEqual(
self.run_argv([ temp_file.name, rpath ]),
[ ( rpath, rurl ) ]
)
temp_file.close()
# Upload lfile guessing first writable path starting from [0]
temp_file = tempfile.NamedTemporaryFile()
temp_folder, temp_filename = os.path.split(temp_file.name)
rpath, rurl = self._get_path_url(0, temp_filename)
self.assertEqual(
self.run_argv([ temp_file.name, self.folders_rel[0] ]),
[ ( rpath, rurl ) ]
)
temp_file.close()
# Upload lfile guessing first writable path from [1],
# that is [2]
temp_file = tempfile.NamedTemporaryFile()
temp_folder, temp_filename = os.path.split(temp_file.name)
rpath, rurl = self._get_path_url(2, temp_filename)
self.assertEqual(
self.run_argv([ temp_file.name, self.folders_rel[1] ]),
[ ( rpath, rurl ) ]
)
temp_file.close()
def test_file_uploadweb_content(self):
# Upload content with fake lfile guessing first writable path from [1],
# that is [2]
temp_file_name = '/tmp/nonexistant'
temp_folder, temp_filename = os.path.split(temp_file_name)
rpath, rurl = self._get_path_url(2, temp_filename)
self.assertEqual(
self.run_argv([ temp_file_name, self.folders_rel[1], '-content', '1' ]),
[ ( rpath, rurl ) ]
)
@log_capture()
def test_uploadweb_errs(self, log_captured):
# Upload a not existant lpath
self.assertIsNone(self.run_argv([ 'bogus', self.folders_rel[0] ]))
self.assertEqual(log_captured.records[-1].msg[:18],
messages.generic.error_loading_file_s_s[:18])
# Upload a not existant rpath
temp_file = tempfile.NamedTemporaryFile()
self.assertIsNone(self.run_argv([ temp_file.name, self.folders_rel[0] + '/bogus/bogus' ]))
self.assertEqual(log_captured.records[-1].msg,
messages.module_file_upload.failed_upload_file)
# Upload a not writable folder
temp_file = tempfile.NamedTemporaryFile()
self.assertIsNone(self.run_argv([ temp_file.name, self.folders_rel[3] + '/bogus' ]))
self.assertEqual(log_captured.records[-1].msg,
messages.module_file_upload.failed_upload_file)
|
w1ll1am23/home-assistant | refs/heads/dev | tests/components/apple_tv/__init__.py | 9 | """Tests for Apple TV."""
import pytest
# Make asserts in the common module display differences
pytest.register_assert_rewrite("tests.components.apple_tv.common")
|
Cerberus98/tortilla | refs/heads/master | setup.py | 1 | #!/usr/bin/env python
import distutils.core
import pip.download
from pip.req import parse_requirements
try:
import setuptools
except ImportError:
pass
version = "0.01"
def requires(path):
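    # Parse a pip requirements file into a plain list of package names.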
return [r.name for r in parse_requirements(path, session=pip.download.PipSession())
if r]
distutils.core.setup(
name="tortilla",
version=version,
packages=["tortilla"],
author="Matt Dietz",
author_email="matthew.dietz@gmail.com",
url="",
download_url="",
license="Apache 2",
description="",
entry_points={
"tortilla.config": [
"foo = tortilla.poo:foo",
],
"console_scripts": [
"foo = bar"
]},
install_requires=requires("requirements.txt"))
|
scrollback/kuma | refs/heads/master | vendor/packages/pylint/test/input/__init__.py | 36 | """test"""
|
mitdrc/director | refs/heads/master | src/python/director/robotposegui.py | 4 | import director
from PythonQt import QtCore, QtGui, QtUiTools
import lcm
import sys
import json
import time
import os
import bot_core as lcmbotcore
import functools
from collections import OrderedDict
class LCMWrapper(object):
'''
    A basic class providing some convenience methods around LCM.
'''
def __init__(self):
self.lc = lcm.LCM()
def subscribe(self, channel, callback):
return self.lc.subscribe(channel, callback)
def captureMessage(self, channel, messageClass):
messages = []
def handleMessage(channel, messageData):
messages.append(messageClass.decode(messageData))
subscription = self.subscribe(channel, handleMessage)
while not messages:
self.lc.handle()
time.sleep(0.01)
self.lc.unsubscribe(subscription)
return messages[0]
def publish(self, channel, message):
self.lc.publish(channel, message.encode())
def captureRobotState():
'''
Blocks until a new LCM message is received on the EST_ROBOT_STATE channel,
returns the new message.
'''
return lcmWrapper.captureMessage('EST_ROBOT_STATE', lcmbotcore.robot_state_t)
def capturePostureGoal():
'''
Blocks until a new LCM message is received on the POSTURE_GOAL channel,
returns the new message.
'''
return lcmWrapper.captureMessage('POSTURE_GOAL', lcmbotcore.joint_angles_t)
def capturePoseFromMessage(messageCaptureFunction):
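    '''
    Invokes the given capture function and converts the resulting joint
    message into a dict mapping joint name to joint position.
    '''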
msg = messageCaptureFunction()
joints = dict()
for joint, position in zip(msg.joint_name, msg.joint_position):
joints[joint] = position
return joints
def getUtime():
return int(time.time() * 1e6)
directorConfigFile = None
directorConfig = None
def setDirectorConfigFile(filename):
global directorConfig, directorConfigFile
directorConfigFile = filename
directorConfig = None
def getDefaultDirectorConfigFile():
return os.path.join(os.environ['DRC_BASE'], 'software/models/atlas_v5/director_config.json')
def getDirectorConfig():
global directorConfig, directorConfigFile
if directorConfig is None:
if directorConfigFile is None:
directorConfigFile = getDefaultDirectorConfigFile()
with open(directorConfigFile) as configFile:
directorConfig = json.load(configFile)
return directorConfig
def getJointSets():
'''
Returns a dictionary of joint sets.
'''
config = getDirectorConfig()
jointGroups = config['teleopJointGroups']
jointSets = OrderedDict()
groups = ['left arm', 'right arm', 'back', 'left leg', 'right leg', 'base']
for group in jointGroups:
groupName = group['name'].lower()
if groupName in groups:
jointSets[groupName] = group['joints']
return jointSets
def findPrefixInJointNames(jointNames, armJointList):
for name in jointNames:
if name in armJointList:
return True
return False
def getLeftArmInJoints(joints, jointGroups):
for jointGroup in jointGroups:
if jointGroup['name'] == 'Left Arm':
return findPrefixInJointNames(joints.keys(), jointGroup['joints'])
return False
def getRightArmInJoints(joints, jointGroups):
for jointGroup in jointGroups:
if jointGroup['name'] == 'Right Arm':
return findPrefixInJointNames(joints.keys(), jointGroup['joints'])
return False
def getJointNamesForPoseType(poseType):
'''
Returns a list of joint names for each part of the robot described in the
poseType argument. For example, if poseType is the string 'left arm, right arm',
then the joint names for the left and right arms is returned. Supported robot
parts are left arm, right arm, back.
'''
jointSets = getJointSets()
jointNames = []
for name, jointSet in jointSets.iteritems():
if name in poseType:
jointNames += jointSet
return jointNames
def updateComboStrings(combo, strings, defaultSelection):
'''
Clears the given combobox and then adds the given strings to the combo.
Restores the combo's current value, or if the combo was empty, uses the
string given in defaultSelection.
'''
currentText = str(combo.currentText) if combo.count else defaultSelection
combo.clear()
for text in strings:
if not text:
combo.insertSeparator(combo.count)
continue
combo.addItem(text)
if text == currentText:
combo.setCurrentIndex(combo.count - 1)
def loadConfig(filename):
'''
Reads the contents of filename and parses it as a json document, returns
the result as a dict.
'''
assert os.path.isfile(filename)
with open(filename, 'r') as infile:
return json.load(infile)
def saveConfig(config, filename):
'''
Overwrites the file at filename with a json string generated from the given
config argument, a dict.
'''
with open(filename, 'w') as outfile:
json.dump(config, outfile, indent=2, separators=(',', ': '), sort_keys=True)
def storePose(poseType, captureMethod, group, name, description, outFile):
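    '''
    Captures a pose using the given capture method, keeps only the joints
    belonging to poseType, and stores the result as a named posture in the
    json config file at outFile, replacing any posture of the same name in
    the given group.
    '''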
jointSet = getJointNamesForPoseType(poseType)
assert len(jointSet)
poseJoints = captureMethod['function']()
joints = dict()
for joint, position in poseJoints.iteritems():
if joint in jointSet:
joints[joint] = position
posture = dict()
posture['name'] = name
posture['description'] = description
posture['allow_mirror'] = True
posture['joints'] = joints
configFile = getDirectorConfig()
jointGroups = configFile['teleopJointGroups']
# determine a default value for nominal_handedness
hasLeft = getLeftArmInJoints(joints, jointGroups)
hasRight = getRightArmInJoints(joints, jointGroups)
posture['nominal_handedness'] = 'none'
if hasLeft != hasRight:
posture['nominal_handedness'] = 'left' if hasLeft else 'right'
config = loadConfig(outFile)
postures = config.setdefault(group, [])
for existingPosture in postures:
if existingPosture['name'] == name:
postures.remove(existingPosture)
postures.append(posture)
saveConfig(config, outFile)
def applyMirror(joints):
'''
joints is a dict where the keys are joint name strings and the values are
joint positions. This function renames left arm and right arm joints
and flips the sign of the joint position as required, and also flips the sign
on back_bkz. Note that other back joints are not modified by this function.
Returns the result as a new dictionary in the same format.
'''
def toLeft(jointName):
'''
        If the joint is a right arm joint, substitute the corresponding
        left arm joint. This assumes the corresponding joints appear at
        the same index in each list.
'''
if jointName in rightArmJointList:
return leftArmJointList[rightArmJointList.index(jointName)]
return jointName
def toRight(jointName):
if jointName in leftArmJointList:
return rightArmJointList[leftArmJointList.index(jointName)]
return jointName
def flipLeftRight(jointName):
if jointName in leftArmJointList:
return toRight(jointName)
else:
return toLeft(jointName)
signFlips = getDirectorConfig()['mirrorJointSignFlips']
configFile = getDirectorConfig()
jointGroups = configFile['teleopJointGroups']
leftArmJointList = filter(lambda thisJointGroup: thisJointGroup['name'] == 'Left Arm', jointGroups)[0]['joints']
rightArmJointList = filter(lambda thisJointGroup: thisJointGroup['name'] == 'Right Arm', jointGroups)[0]['joints']
flipped = {}
for name, position in joints.iteritems():
name = flipLeftRight(name)
if name in signFlips:
position = -position
flipped[name] = position
return flipped
def publishPostureGoal(joints, postureName, channel='POSTURE_GOAL'):
'''
Given a dict mapping joint name strings to joint positions, creates a
joint_angles_t LCM message and publishes the result on the given channel name.
'''
msg = lcmbotcore.joint_angles_t()
msg.utime = getUtime()
for name, position in joints.iteritems():
msg.joint_name.append(name)
msg.joint_position.append(position)
msg.num_joints = len(msg.joint_name)
lcmWrapper.publish(channel, msg)
lcmWrapper.publish('POSTURE_GOAL_CANNED', msg)
publishSystemStatus('sending posture goal: ' + postureName)
def publishTrajGoal(name, channel=''):
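    '''
    Publishes a behavior_command_t message with the given command name on
    the EE_TRAJ_GOAL channel.
    '''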
import drc as lcmdrc
msg = lcmdrc.behavior_command_t()
msg.utime = getUtime()
msg.command = name
lcmWrapper.publish('EE_TRAJ_GOAL', msg)
publishSystemStatus('sending EE traj goal: ' + name)
def publishSystemStatus(text):
msg = lcmbotcore.system_status_t()
msg.utime = getUtime()
msg.system = 5
msg.importance = 0
msg.frequency = 0
msg.value = text
lcmWrapper.publish('SYSTEM_STATUS', msg)
class SendPosturePanel(object):
def __init__(self, parent):
self.parent = parent
self.ui = parent.ui
self.selectedPosture = None
self.setup()
def setup(self):
self.ui.postureFilter.hide()
self.ui.postureFilterLabel.hide()
self.ui.sendLeftButton.connect(QtCore.SIGNAL('clicked()'), self.onLeftClicked)
self.ui.sendRightButton.connect(QtCore.SIGNAL('clicked()'), self.onRightClicked)
self.ui.sendDefaultButton.connect(QtCore.SIGNAL('clicked()'), self.onDefaultClicked)
self.ui.sendPostureGroupCombo.connect(QtCore.SIGNAL('currentIndexChanged(const QString&)'), self.onGroupComboChanged)
self.ui.postureListWidget.connect(QtCore.SIGNAL('currentRowChanged(int)'), self.onPostureSelected)
self.updateGroupCombo()
self.updatePostureListWidget()
def updateGroupCombo(self):
groupNames = self.parent.getGroupNames()
try:
groupNames.remove('General')
except ValueError:
pass
groupNames.insert(0, '')
groupNames.insert(0, 'General')
groupNames.insert(0, 'All')
self.ui.sendPostureGroupCombo.blockSignals(True)
updateComboStrings(self.ui.sendPostureGroupCombo, groupNames, 'All')
self.ui.sendPostureGroupCombo.blockSignals(False)
def setSelectedGroup(self, groupName):
index = self.ui.sendPostureGroupCombo.findText(groupName)
if index < 0: index = 0
self.ui.sendPostureGroupCombo.setCurrentIndex(index)
def onGroupComboChanged(self):
self.updatePostureListWidget()
def getSelectedGroup(self):
return str(self.ui.sendPostureGroupCombo.currentText)
def updatePostureListWidget(self):
groupName = self.getSelectedGroup()
self.currentPostures = self.parent.getPosturesInGroup(groupName)
self.ui.postureListWidget.blockSignals(True)
self.ui.postureListWidget.clear()
for posture in self.currentPostures:
self.ui.postureListWidget.addItem(posture['name'])
self.ui.postureListWidget.setCurrentRow(0)
self.ui.postureListWidget.blockSignals(False)
self.onPostureSelected()
def getSelectedPosture(self):
currentItem = self.ui.postureListWidget.currentItem()
if not currentItem:
return None
postureName = str(currentItem.text())
for posture in self.currentPostures:
if posture['name'] == postureName:
return posture
def getPostureCanBeMirrored(self, posture):
return (posture['allow_mirror']
and self.getNominalHandedness(posture) in ('left', 'right')
and 'mirrorJointSignFlips' in getDirectorConfig())
def getNominalHandedness(self, posture):
handedness = posture['nominal_handedness']
assert handedness in ('left', 'right', 'none')
return handedness
def onPostureSelected(self):
self.selectedPosture = self.getSelectedPosture()
self.updateDescriptionLabel()
self.ui.sendDefaultButton.setVisible(False)
self.ui.sendLeftButton.setVisible(True)
self.ui.sendRightButton.setVisible(True)
self.ui.sendDefaultButton.setEnabled(False)
self.ui.sendLeftButton.setEnabled(False)
self.ui.sendRightButton.setEnabled(False)
if not self.selectedPosture:
return
if self.getPostureCanBeMirrored(self.selectedPosture):
self.ui.sendLeftButton.setEnabled(True)
self.ui.sendRightButton.setEnabled(True)
else:
self.ui.sendLeftButton.setVisible(False)
self.ui.sendRightButton.setVisible(False)
self.ui.sendDefaultButton.setVisible(True)
self.ui.sendDefaultButton.setEnabled(True)
def updateDescriptionLabel(self):
description = self.selectedPosture['description'] if self.selectedPosture else 'none'
self.ui.descriptionLabel.setText('Description: ' + str(description))
def onGroupsChanged(self):
self.updateGroupCombo()
    def onPostureAdded(self):
self.updatePostureListWidget()
def onLeftClicked(self):
joints = self.selectedPosture['joints']
if self.getNominalHandedness(self.selectedPosture) == 'right':
joints = applyMirror(joints)
publishPostureGoal(joints, self.selectedPosture['name'] + ' left')
def onRightClicked(self):
joints = self.selectedPosture['joints']
if self.getNominalHandedness(self.selectedPosture) == 'left':
joints = applyMirror(joints)
publishPostureGoal(joints, self.selectedPosture['name'] + ' right')
def onDefaultClicked(self):
joints = self.selectedPosture['joints']
publishPostureGoal(joints, self.selectedPosture['name'] + ' default')
def saveSettings(self, settings):
settings.setValue('sendPose/currentGroup', self.getSelectedGroup())
def restoreSettings(self, settings):
self.setSelectedGroup(str(settings.value('sendPose/currentGroup', 'All')))
class CapturePanel(object):
def __init__(self, parent):
self.parent = parent
self.ui = parent.ui
self.captureMethods = []
self.setup()
def setup(self):
self.ui.captureButton.connect(QtCore.SIGNAL('clicked()'), self.onCaptureClicked)
self.ui.groupCombo.connect(QtCore.SIGNAL('currentIndexChanged(const QString&)'), self.onGroupComboChanged)
self.updateGroupCombo()
self.initCaptureMethods()
jointSetNames = getJointSets().keys()
updateComboStrings(self.ui.jointSetCombo, jointSetNames, jointSetNames[0])
def updateGroupCombo(self):
groupNames = self.parent.getGroupNames()
try:
groupNames.remove('General')
except ValueError:
pass
groupNames.insert(0, '')
groupNames.insert(0, 'General')
groupNames.append('')
groupNames.append('New group...')
self.ui.groupCombo.blockSignals(True)
updateComboStrings(self.ui.groupCombo, groupNames, 'General')
self.ui.groupCombo.blockSignals(False)
def setSelectedGroup(self, groupName):
index = self.ui.groupCombo.findText(groupName)
if index < 0: index = 0
self.ui.groupCombo.setCurrentIndex(index)
def getSelectedGroup(self):
return str(self.ui.groupCombo.currentText)
def onGroupComboChanged(self):
if str(self.ui.groupCombo.currentText) == 'New group...':
groupName = self.parent.messageBoxInput('Enter new group name', 'Group name:')
if not groupName or groupName == '':
self.setSelectedGroup('General')
return
groupName = str(groupName)
self.parent.addNewGroup(groupName)
self.setSelectedGroup(groupName)
def onGroupsChanged(self):
self.updateGroupCombo()
def saveSettings(self, settings):
settings.setValue('sendPose/currentGroup', self.getSelectedGroup())
def restoreSettings(self, settings):
self.setSelectedGroup(str(settings.value('capturePanel/currentGroup', 'General')))
def initCaptureMethods(self):
self.addCaptureMethod('EST_ROBOT_STATE lcm channel', functools.partial(capturePoseFromMessage, captureRobotState))
def getCaptureMethod(self, name):
for method in self.captureMethods:
if method['name'] == name:
return method
def addCaptureMethod(self, name, function):
if self.getCaptureMethod(name):
raise Exception('Refusing to re-add capture method: %s' % name)
self.captureMethods.append(dict(name=name, function=function))
captureNames = [method['name'] for method in self.captureMethods]
updateComboStrings(self.ui.captureChannelCombo, captureNames, self.captureMethods[0]['name'])
def onCaptureClicked(self):
captureMethod = self.getCaptureMethod(self.ui.captureChannelCombo.currentText)
group = str(self.ui.groupCombo.currentText)
name = str(self.ui.nameEdit.text)
description = str(self.ui.descriptionEdit.text)
poseType = str(self.ui.jointSetCombo.currentText)
outFile = self.parent.getPoseConfigFile()
if not name:
self.parent.showWarning('Empty field', 'Please enter a name into the text box.')
return
existingPostures = self.parent.getPosturesInGroup(group)
for posture in existingPostures:
if posture['name'] == name:
reply = self.parent.showQuestion('Overwrite posture?', 'Posture with name "%s" already exists.\nDo you want to overwrite?' % name,
QtGui.QMessageBox.Yes | QtGui.QMessageBox.No, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.No:
return
storePose(poseType, captureMethod, group, name, description, outFile)
self.parent.onPostureAdded()
def addWidgetsToDict(widgets, d):
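    '''Recursively adds each named widget to d, keyed by its objectName.'''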
for widget in widgets:
if widget.objectName:
d[str(widget.objectName)] = widget
addWidgetsToDict(widget.children(), d)
class WidgetDict(object):
def __init__(self, widgets):
addWidgetsToDict(widgets, self.__dict__)
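# Example (illustrative, not part of the original file): addWidgetsToDict
# recurses through the whole widget tree, so after
#   ui = WidgetDict(widget.children())
# a child declared as 'captureButton' in the .ui file is reachable directly
# as ui.captureButton.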
class MainWindow(object):
def __init__(self):
loader = QtUiTools.QUiLoader()
uifile = QtCore.QFile(':/ui/ddRobotPoseGui.ui')
assert uifile.open(uifile.ReadOnly)
self.widget = loader.load(uifile)
uifile.close()
self.ui = WidgetDict(self.widget.children())
self.widget.setWindowTitle('Robot Pose Utility')
self.messageBoxWarning = functools.partial(QtGui.QMessageBox.warning, self.widget)
self.messageBoxQuestion = functools.partial(QtGui.QMessageBox.question, self.widget)
self.messageBoxInput = functools.partial(QtGui.QInputDialog.getText, self.widget)
assert directorConfigFile is not None
self.configFile = os.path.join(os.path.dirname(directorConfigFile), getDirectorConfig()['postureDatabaseFile'])
if not self.checkConfigFile():
return
self.setup()
self.restoreSettings()
def setup(self):
QtGui.QShortcut(QtGui.QKeySequence('Ctrl+W'), self.widget).connect(QtCore.SIGNAL('activated()'), self.close)
QtGui.QShortcut(QtGui.QKeySequence('Ctrl+Q'), self.widget).connect(QtCore.SIGNAL('activated()'), self.close)
self.capturePanel = CapturePanel(self)
self.sendPosturePanel = SendPosturePanel(self)
def showWarning(self, title, message):
return self.messageBoxWarning(title, message)
def showQuestion(self, title, message, buttons, defaultButton):
return self.messageBoxQuestion(title, message, buttons, defaultButton)
def showInput(self, title, message):
return self.messageBoxInput(title, message)
def getSettings(self):
return QtCore.QSettings('mitdrc', 'RobotPoseGUI')
def saveSettings(self):
settings = self.getSettings()
settings.setValue('currentTabIndex', int(self.ui.tabWidget.currentIndex))
self.capturePanel.saveSettings(settings)
self.sendPosturePanel.saveSettings(settings)
def restoreSettings(self):
settings = self.getSettings()
self.ui.tabWidget.setCurrentIndex(int(settings.value('currentTabIndex', 0)))
self.capturePanel.restoreSettings(settings)
self.sendPosturePanel.restoreSettings(settings)
def close(self):
self.saveSettings()
self.widget.close()
def checkConfigFile(self):
configFile = self.getPoseConfigFile()
if not os.path.isfile(configFile):
self.showWarning('Config file not found', 'Config file not found: %s' % configFile)
            self.widget.setEnabled(False)
return False
json.load(open(configFile, 'r'))
return True
def getPoseConfigFile(self):
return self.configFile
def loadConfigFile(self):
if not self.checkConfigFile():
return
config = json.load(open(self.getPoseConfigFile(), 'r'))
if not self.checkPostures(config):
return {}
return config
def getGroupNames(self):
if not self.checkConfigFile():
return []
return sorted(self.loadConfigFile().keys())
def checkPostures(self, config):
for groupName, postures in config.iteritems():
for i, posture in enumerate(postures):
for name in ['name', 'description', 'joints', 'nominal_handedness']:
if name not in posture:
                        self.showWarning('Format error', 'Format error in posture %d of group "%s". Missing attribute: "%s".' % (i, groupName, name))
self.currentConfig = {}
return False
return True
def getPosturesInGroup(self, groupName):
config = self.loadConfigFile()
postures = []
if groupName == 'All':
for group, postureList in config.iteritems():
for posture in postureList:
posture['name'] = '%s - %s' % (group, posture['name'])
postures += postureList
else:
postures = config.get(groupName, [])
return sorted(postures, key=lambda x: x['name'])
def addNewGroup(self, groupName):
config = self.loadConfigFile()
config.setdefault(groupName, [])
saveConfig(config, self.getPoseConfigFile())
self.capturePanel.onGroupsChanged()
self.sendPosturePanel.onGroupsChanged()
def onPostureAdded(self):
self.sendPosturePanel.updatePostureListWidget()
def main():
try:
configFile = os.path.abspath(sys.argv[1])
setDirectorConfigFile(configFile)
except IndexError:
print 'You must provide a director_config.json file.'
print 'Usage: %s <path to director_config.json>' % sys.argv[0]
return
# create a global instance of the LCMWrapper
global lcmWrapper
lcmWrapper = LCMWrapper()
# start the application
mainWindow = MainWindow()
mainWindow.widget.show()
QtCore.QCoreApplication.instance().exec_()
if __name__ == '__main__':
main()
|
lkorigin/laniakea | refs/heads/master | src/lighthouse/lighthouse/jobs_server.py | 1 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2018-2019 Matthias Klumpp <matthias@tenstral.net>
#
# Licensed under the GNU Lesser General Public License Version 3
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import json
import logging as log
from laniakea import LocalConfig, LkModule
from laniakea.utils import json_compact_dump
from lighthouse.jobs_worker import JobWorker
import zmq
import zmq.auth
from zmq.auth.ioloop import IOLoopAuthenticator
from zmq.eventloop import ioloop, zmqstream
class JobsServer:
'''
Lighthouse module serving job requests.
'''
def __init__(self, endpoint, pub_queue):
self._server = None
self._ctx = zmq.Context.instance()
lconf = LocalConfig()
self._trusted_keys_dir = lconf.trusted_curve_keys_dir + '/'
self._server_private_key = lconf.secret_curve_keyfile_for_module(LkModule.LIGHTHOUSE)
self._jobs_endpoint = endpoint
self._worker = JobWorker(pub_queue)
def _client_request_received(self, server, msg):
try:
request = json.loads(msg[1])
except json.JSONDecodeError as e:
# we ignore invalid requests
log.info('Received invalid JSON request from client: %s (%s)', msg, str(e))
return
try:
reply = self._worker.process_client_message(request)
except Exception as e:
reply = json_compact_dump({'error': 'Internal Error: {}'.format(e)}, as_bytes=True)
# an empty result means we shouldn't send a message back
if not reply:
return
# ensure we have the bytes of a JSON string
# (workers are permitted to return e.g. True or UTF-8 strings)
if type(reply) is str:
reply = reply.encode('utf-8')
elif type(reply) is not bytes:
reply = json_compact_dump(reply, as_bytes=True)
reply_msg = [msg[0], reply]
log.debug('Sending %s', reply_msg)
server.send_multipart(reply_msg)
def _setup_server(self):
'''
Set up the server with authentication.
'''
self._server = self._ctx.socket(zmq.ROUTER)
server_public, server_secret = zmq.auth.load_certificate(self._server_private_key)
self._server.curve_secretkey = server_secret
self._server.curve_publickey = server_public
self._server.curve_server = True # must come before bind
self._server.bind(self._jobs_endpoint)
server_stream = zmqstream.ZMQStream(self._server)
server_stream.on_recv_stream(self._client_request_received)
def run(self):
if self._server:
log.warning('Tried to run an already running server again.')
return
if not os.path.isfile(self._server_private_key):
log.critical('No private key is installed for Lighthouse. Can not create an encrypted connection.')
sys.exit(2)
if not os.path.isdir(self._trusted_keys_dir):
log.warning('Trusted keys directory does not exist. No clients will be able to make connections to this Lighthouse server.')
# Start an authenticator for this context.
auth = IOLoopAuthenticator(self._ctx)
# NOTE: auth.allow('127.0.0.1') can be used to allow access only from specific IPs (whitelisting)
# Tell authenticator to use the certificate in a directory
auth.configure_curve(domain='*', location=self._trusted_keys_dir)
self._setup_server()
auth.start()
ioloop.IOLoop.instance().start()
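# Illustrative usage sketch (not part of the original module): standing up a
# JobsServer from a __main__ entry point. The endpoint string and the queue
# object below are placeholder assumptions; in Laniakea the daemon wiring
# supplies the real values.
if __name__ == '__main__':
    import multiprocessing

    pub_queue = multiprocessing.Queue()  # hypothetical publisher queue
    server = JobsServer('tcp://*:5570', pub_queue)
    server.run()  # blocks in the IOLoop until the process is stopped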
|
annegabrielle/secure_adhoc_network_ns-3 | refs/heads/master | doc/testing/source/conf.py | 5 | # -*- coding: utf-8 -*-
#
# ns-3 documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 14 09:00:39 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.pngmath']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ns-3'
copyright = u'2010, ns-3 project'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'ns-3.10'
# The full version, including alpha/beta/rc tags.
release = 'ns-3.10'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'ns-3doc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'ns-3.tex', u'ns-3 Testing and Validation',
u'ns-3 project', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ns-3', u'ns-3 Documentation',
[u'ns-3 project'], 1)
]
|
ibjohansen/acando-react-boilerplate-extended | refs/heads/master | node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py | 1558 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import os
def XmlToString(content, encoding='utf-8', pretty=False):
""" Writes the XML content to disk, touching the file only if it has changed.
Visual Studio files have a lot of pre-defined structures. This function makes
it easy to represent these structures as Python data structures, instead of
having to create a lot of function calls.
Each XML element of the content is represented as a list composed of:
1. The name of the element, a string,
2. The attributes of the element, a dictionary (optional), and
3+. The content of the element, if any. Strings are simple text nodes and
lists are child elements.
Example 1:
<test/>
becomes
['test']
Example 2:
<myelement a='value1' b='value2'>
<childtype>This is</childtype>
<childtype>it!</childtype>
</myelement>
becomes
['myelement', {'a':'value1', 'b':'value2'},
['childtype', 'This is'],
['childtype', 'it!'],
]
Args:
content: The structured content to be converted.
encoding: The encoding to report on the first XML line.
pretty: True if we want pretty printing with indents and new lines.
Returns:
The XML content as a string.
"""
# We create a huge list of all the elements of the file.
xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
if pretty:
xml_parts.append('\n')
_ConstructContentList(xml_parts, content, pretty)
# Convert it to a string
return ''.join(xml_parts)
def _ConstructContentList(xml_parts, specification, pretty, level=0):
""" Appends the XML parts corresponding to the specification.
Args:
xml_parts: A list of XML parts to be appended to.
specification: The specification of the element. See EasyXml docs.
pretty: True if we want pretty printing with indents and new lines.
level: Indentation level.
"""
# The first item in a specification is the name of the element.
if pretty:
indentation = ' ' * level
new_line = '\n'
else:
indentation = ''
new_line = ''
name = specification[0]
if not isinstance(name, str):
raise Exception('The first item of an EasyXml specification should be '
'a string. Specification was ' + str(specification))
xml_parts.append(indentation + '<' + name)
# Optionally in second position is a dictionary of the attributes.
rest = specification[1:]
if rest and isinstance(rest[0], dict):
for at, val in sorted(rest[0].iteritems()):
xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
rest = rest[1:]
if rest:
xml_parts.append('>')
all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
multi_line = not all_strings
if multi_line and new_line:
xml_parts.append(new_line)
for child_spec in rest:
# If it's a string, append a text node.
# Otherwise recurse over that child definition
if isinstance(child_spec, str):
xml_parts.append(_XmlEscape(child_spec))
else:
_ConstructContentList(xml_parts, child_spec, pretty, level + 1)
if multi_line and indentation:
xml_parts.append(indentation)
xml_parts.append('</%s>%s' % (name, new_line))
else:
xml_parts.append('/>%s' % new_line)
def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False,
win32=False):
""" Writes the XML content to disk, touching the file only if it has changed.
Args:
content: The structured content to be written.
path: Location of the file.
encoding: The encoding to report on the first line of the XML file.
pretty: True if we want pretty printing with indents and new lines.
"""
xml_string = XmlToString(content, encoding, pretty)
if win32 and os.linesep != '\r\n':
xml_string = xml_string.replace('\n', '\r\n')
try:
xml_string = xml_string.encode(encoding)
except Exception:
xml_string = unicode(xml_string, 'latin-1').encode(encoding)
# Get the old content
try:
f = open(path, 'r')
existing = f.read()
f.close()
except:
existing = None
# It has changed, write it
if existing != xml_string:
f = open(path, 'w')
f.write(xml_string)
f.close()
_xml_escape_map = {
    '"': '&quot;',
    "'": '&apos;',
    '<': '&lt;',
    '>': '&gt;',
    '&': '&amp;',
    '\n': '&#xA;',
    '\r': '&#xD;',
}
_xml_escape_re = re.compile(
"(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
def _XmlEscape(value, attr=False):
""" Escape a string for inclusion in XML."""
def replace(match):
m = match.string[match.start() : match.end()]
# don't replace single quotes in attrs
if attr and m == "'":
return m
return _xml_escape_map[m]
return _xml_escape_re.sub(replace, value)
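if __name__ == '__main__':
  # Illustrative demo (not part of the original gyp module): render the
  # second specification shown in the XmlToString docstring. It only
  # exercises the public XmlToString helper, so it is safe to run directly.
  example = ['myelement', {'a': 'value1', 'b': 'value2'},
             ['childtype', 'This is'],
             ['childtype', 'it!'],
            ]
  print XmlToString(example, pretty=True)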
|
josegonzalez/python-smooshy | refs/heads/master | setup.py | 1 | from distutils.core import setup
setup(
name='smooshy',
version='1',
author='Jose Gonzalez',
author_email='support@savant.be',
packages=['smooshy'],
scripts=['bin/smooshy'],
url='https://github.com/josegonzalez/smooshy',
license='LICENSE.txt',
description='Automatic lossless image compression',
long_description=open('README.rst').read(),
install_requires=['simplejson']
)
|
vadimtk/chrome4sdp | refs/heads/master | tools/telemetry/third_party/gsutilz/third_party/boto/tests/integration/kms/test_kms.py | 99 | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.kms.exceptions import NotFoundException
from tests.compat import unittest
class TestKMS(unittest.TestCase):
def setUp(self):
self.kms = boto.connect_kms()
def test_list_keys(self):
response = self.kms.list_keys()
self.assertIn('Keys', response)
def test_handle_not_found_exception(self):
with self.assertRaises(NotFoundException):
            # Describe some key that does not exist
self.kms.describe_key(
key_id='nonexistant_key',
)
|
shujingke/opencog | refs/heads/master | opencog/python/blending/src/chooser/chooser_finder.py | 22 | from blending.src.chooser.choose_null import ChooseNull
from blending.src.chooser.choose_all import ChooseAll
from blending.src.chooser.choose_in_sti_range import ChooseInSTIRange
from blending.util.blending_config import BlendConfig
from blending.util.blending_error import blending_status
__author__ = 'DongMin Kim'
class ChooserFinder(object):
"""Provider class to make atoms chooser instance.
    This provider makes an instance of the atoms chooser and returns it to
    the blender.
Attributes:
a: An instance of AtomSpace.
last_status: A last status of class.
choosers: An available atoms chooser list.
:type a: opencog.atomspace.AtomSpace
:type last_status: int
:type choosers: dict[BaseChooser]
"""
def __init__(self, a):
self.a = a
self.last_status = blending_status.UNKNOWN_ERROR
self.choosers = {
ChooseNull.__name__: ChooseNull,
ChooseAll.__name__: ChooseAll,
ChooseInSTIRange.__name__: ChooseInSTIRange
}
def get_chooser(self, config_base):
"""Provider method for atoms chooser.
Args:
config_base: A Node to save custom config.
:param config_base: Atom
Returns:
The instance of atoms chooser.
:rtype : BaseChooser
Raises:
UserWarning: Can't find the atoms chooser with given name.
"""
self.last_status = blending_status.IN_PROCESS
chooser = self.choosers.get(
BlendConfig().get_str(self.a, "atoms-chooser", config_base)
)
if chooser is not None:
self.last_status = blending_status.SUCCESS
return chooser(self.a)
else:
self.last_status = blending_status.PARAMETER_ERROR
raise UserWarning
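# Illustrative usage (not part of the original module), assuming an existing
# AtomSpace `a` and a configuration Node `config_base`:
#
#   finder = ChooserFinder(a)
#   chooser = finder.get_chooser(config_base)
#
# finder.last_status is blending_status.SUCCESS when the configured chooser
# name is known; an unknown name raises UserWarning with PARAMETER_ERROR set.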
|
MountainWei/nova | refs/heads/master | nova/tests/unit/api/openstack/compute/test_rescue.py | 29 | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
import webob
from nova.api.openstack.compute.legacy_v2.contrib import rescue as rescue_v2
from nova.api.openstack.compute import rescue as rescue_v21
from nova.api.openstack import extensions
from nova import compute
from nova import exception
from nova import test
from nova.tests.unit.api.openstack import fakes
CONF = cfg.CONF
CONF.import_opt('password_length', 'nova.utils')
UUID = '70f6db34-de8d-4fbd-aafb-4065bdfa6114'
def rescue(self, context, instance, rescue_password=None,
rescue_image_ref=None):
pass
def unrescue(self, context, instance):
pass
def fake_compute_get(*args, **kwargs):
return {'id': 1, 'uuid': UUID}
class RescueTestV21(test.NoDBTestCase):
def setUp(self):
super(RescueTestV21, self).setUp()
self.stubs.Set(compute.api.API, "get", fake_compute_get)
self.stubs.Set(compute.api.API, "rescue", rescue)
self.stubs.Set(compute.api.API, "unrescue", unrescue)
self.controller = self._set_up_controller()
self.fake_req = fakes.HTTPRequest.blank('')
def _set_up_controller(self):
return rescue_v21.RescueController()
def test_rescue_from_locked_server(self):
def fake_rescue_from_locked_server(self, context,
instance, rescue_password=None, rescue_image_ref=None):
raise exception.InstanceIsLocked(instance_uuid=instance['uuid'])
self.stubs.Set(compute.api.API,
'rescue',
fake_rescue_from_locked_server)
body = {"rescue": {"adminPass": "AABBCC112233"}}
self.assertRaises(webob.exc.HTTPConflict,
self.controller._rescue,
self.fake_req, UUID, body=body)
def test_rescue_with_preset_password(self):
body = {"rescue": {"adminPass": "AABBCC112233"}}
resp = self.controller._rescue(self.fake_req, UUID, body=body)
self.assertEqual("AABBCC112233", resp['adminPass'])
def test_rescue_generates_password(self):
body = dict(rescue=None)
resp = self.controller._rescue(self.fake_req, UUID, body=body)
self.assertEqual(CONF.password_length, len(resp['adminPass']))
def test_rescue_of_rescued_instance(self):
body = dict(rescue=None)
def fake_rescue(*args, **kwargs):
raise exception.InstanceInvalidState('fake message')
self.stubs.Set(compute.api.API, "rescue", fake_rescue)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._rescue,
self.fake_req, UUID, body=body)
def test_unrescue(self):
body = dict(unrescue=None)
resp = self.controller._unrescue(self.fake_req, UUID, body=body)
# NOTE: on v2.1, http status code is set as wsgi_code of API
# method instead of status_int in a response object.
if isinstance(self.controller,
rescue_v21.RescueController):
status_int = self.controller._unrescue.wsgi_code
else:
status_int = resp.status_int
self.assertEqual(202, status_int)
def test_unrescue_from_locked_server(self):
def fake_unrescue_from_locked_server(self, context,
instance):
raise exception.InstanceIsLocked(instance_uuid=instance['uuid'])
self.stubs.Set(compute.api.API,
'unrescue',
fake_unrescue_from_locked_server)
body = dict(unrescue=None)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._unrescue,
self.fake_req, UUID, body=body)
def test_unrescue_of_active_instance(self):
body = dict(unrescue=None)
def fake_unrescue(*args, **kwargs):
raise exception.InstanceInvalidState('fake message')
self.stubs.Set(compute.api.API, "unrescue", fake_unrescue)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._unrescue,
self.fake_req, UUID, body=body)
def test_rescue_raises_unrescuable(self):
body = dict(rescue=None)
def fake_rescue(*args, **kwargs):
raise exception.InstanceNotRescuable('fake message')
self.stubs.Set(compute.api.API, "rescue", fake_rescue)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._rescue,
self.fake_req, UUID, body=body)
@mock.patch('nova.compute.api.API.rescue')
def test_rescue_with_image_specified(self, mock_compute_api_rescue):
instance = fake_compute_get()
body = {"rescue": {"adminPass": "ABC123",
"rescue_image_ref": "img-id"}}
resp_json = self.controller._rescue(self.fake_req, UUID, body=body)
self.assertEqual("ABC123", resp_json['adminPass'])
mock_compute_api_rescue.assert_called_with(mock.ANY, instance,
rescue_password=u'ABC123',
rescue_image_ref=u'img-id')
@mock.patch('nova.compute.api.API.rescue')
def test_rescue_without_image_specified(self, mock_compute_api_rescue):
instance = fake_compute_get()
body = {"rescue": {"adminPass": "ABC123"}}
resp_json = self.controller._rescue(self.fake_req, UUID, body=body)
self.assertEqual("ABC123", resp_json['adminPass'])
mock_compute_api_rescue.assert_called_with(mock.ANY, instance,
rescue_password=u'ABC123',
rescue_image_ref=None)
def test_rescue_with_none(self):
body = dict(rescue=None)
resp = self.controller._rescue(self.fake_req, UUID, body=body)
self.assertEqual(CONF.password_length, len(resp['adminPass']))
def test_rescue_with_empty_dict(self):
body = dict(rescue=dict())
resp = self.controller._rescue(self.fake_req, UUID, body=body)
self.assertEqual(CONF.password_length, len(resp['adminPass']))
def test_rescue_disable_password(self):
self.flags(enable_instance_password=False)
body = dict(rescue=None)
resp_json = self.controller._rescue(self.fake_req, UUID, body=body)
self.assertNotIn('adminPass', resp_json)
def test_rescue_with_invalid_property(self):
body = {"rescue": {"test": "test"}}
self.assertRaises(exception.ValidationError,
self.controller._rescue,
self.fake_req, UUID, body=body)
class RescueTestV20(RescueTestV21):
def _set_up_controller(self):
ext_mgr = extensions.ExtensionManager()
ext_mgr.extensions = {'os-extended-rescue-with-image': 'fake'}
return rescue_v2.RescueController(ext_mgr)
def test_rescue_with_invalid_property(self):
# NOTE(cyeoh): input validation in original v2 code does not
# check for invalid properties.
pass
def test_rescue_disable_password(self):
# NOTE(cyeoh): Original v2.0 code does not support disabling
# the admin password being returned through a conf setting
pass
class RescuePolicyEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(RescuePolicyEnforcementV21, self).setUp()
self.controller = rescue_v21.RescueController()
self.req = fakes.HTTPRequest.blank('')
def test_rescue_policy_failed(self):
rule_name = "os_compute_api:os-rescue"
self.policy.set_rules({rule_name: "project:non_fake"})
body = {"rescue": {"adminPass": "AABBCC112233"}}
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller._rescue, self.req, fakes.FAKE_UUID,
body=body)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_unrescue_policy_failed(self):
rule_name = "os_compute_api:os-rescue"
self.policy.set_rules({rule_name: "project:non_fake"})
body = dict(unrescue=None)
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller._unrescue, self.req, fakes.FAKE_UUID,
body=body)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
|
microcom/odoo | refs/heads/9.0 | addons/l10n_hr/__init__.py | 110 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Author: Goran Kliska
# mail: goran.kliska(AT)slobodni-programi.hr
# Copyright (C) 2011- Slobodni programi d.o.o., Zagreb
|
colinnewell/odoo | refs/heads/8.0 | addons/hr_contract/__init__.py | 381 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-Today OpenERP SA (<http://www.openerp.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_contract
import base_action_rule
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
yaniv14/OpenCommunity | refs/heads/master | src/communities/forms.py | 1 | from datetime import datetime, date, time
from communities.models import Committee, CommunityGroup, CommunityGroupRole
from django.utils.translation import ugettext_lazy as _, gettext
from ocd.formfields import HTMLArea, OCCheckboxSelectMultiple, OCSplitDateTimeField
import floppyforms.__future__ as forms
from haystack.forms import ModelSearchForm
class EditUpcomingMeetingForm(forms.ModelForm):
class Meta:
model = Committee
fields = (
'upcoming_meeting_title',
'upcoming_meeting_location',
'upcoming_meeting_scheduled_at',
# 'voting_ends_at',
'upcoming_meeting_comments',
)
widgets = {
'upcoming_meeting_title': forms.TextInput,
'upcoming_meeting_location': forms.TextInput,
# 'voting_ends_at': OCSplitDateTime,
'upcoming_meeting_comments': HTMLArea,
}
field_classes = {
'upcoming_meeting_scheduled_at': OCSplitDateTimeField,
}
def __init__(self, *args, **kwargs):
super(EditUpcomingMeetingForm, self).__init__(*args, **kwargs)
self.fields['upcoming_meeting_title'].label = _('Title')
self.fields['upcoming_meeting_scheduled_at'].label = _('Scheduled at')
self.fields['upcoming_meeting_location'].label = _('Location')
self.fields['upcoming_meeting_comments'].label = _('Background')
"""
removed this function as we don't include voting_end_time in the form any more.
# ----------------------------------------------------------------------------
def clean(self):
#prevent voting end time from illegal values (past time,
#time after meeting schedule)
try:
voting_ends_at = self.cleaned_data['voting_ends_at']
except KeyError:
voting_ends_at = None
try:
meeting_time = self.cleaned_data['upcoming_meeting_scheduled_at']
except KeyError:
meeting_time = None
if voting_ends_at:
if voting_ends_at <= timezone.now():
raise forms.ValidationError(_("End voting time cannot be set to the past"))
if meeting_time and voting_ends_at > meeting_time:
raise forms.ValidationError(_("End voting time cannot be set to after the meeting time"))
return self.cleaned_data
"""
def save(self):
c = super(EditUpcomingMeetingForm, self).save()
c.voting_ends_at = datetime.combine(date(2025, 1, 1), time(12, 0, 0))
c.save()
return c
class PublishUpcomingMeetingForm(forms.ModelForm):
me = forms.BooleanField(label=_("Me only"), widget=forms.CheckboxInput, required=False)
all_members = forms.BooleanField(label=_("All members"), widget=forms.CheckboxInput, required=False)
send_to = forms.MultipleChoiceField(widget=forms.CheckboxSelectMultiple(), required=False)
class Meta:
model = CommunityGroup
fields = ()
class EditUpcomingMeetingSummaryForm(forms.ModelForm):
class Meta:
model = Committee
fields = (
'upcoming_meeting_summary',
)
widgets = {
'upcoming_meeting_summary': HTMLArea,
}
class UpcomingMeetingParticipantsForm(forms.ModelForm):
class Meta:
model = Committee
fields = (
'upcoming_meeting_participants',
'upcoming_meeting_guests',
)
widgets = {
'upcoming_meeting_participants': OCCheckboxSelectMultiple,
'upcoming_meeting_guests': forms.Textarea,
}
# def __init__(self, *args, **kwargs):
# super(UpcomingMeetingParticipantsForm, self).__init__(*args, **kwargs)
# participants = self.instance.upcoming_meeting_participants.values_list(
# 'id', flat=True)
# board_in = []
# board_choices = []
# # for b in self.instance.get_board_members():
# for b in self.instance.get_community_participant_members():
# board_choices.append((b.id, b.display_name,))
# if b.id in participants:
# board_in.append(b.id)
# self.fields['board'].choices = board_choices
# self.initial['board'] = board_in
# self.fields['upcoming_meeting_participants'].queryset = self.instance.get_members()
# self.fields['upcoming_meeting_participants'].label = ""
class CommunitySearchForm(ModelSearchForm):
pass
# def search(self):
# # First, store the SearchQuerySet received from other processing.
# sqs = super(DateRangeSearchForm, self).search()
#
# if not self.is_valid():
# return self.no_query_found()
#
# return sqs
class GroupForm(forms.ModelForm):
class Meta:
model = CommunityGroup
fields = (
'title',
)
widgets = {
'title': forms.TextInput,
}
def __init__(self, community=None, *args, **kwargs):
super(GroupForm, self).__init__(*args, **kwargs)
if community:
self.group_role = GroupRoleForm(community=community, prefix='group_role',
data=self.data if self.is_bound else None)
self.group_role.fields['role'].required = False
self.group_role.fields['committee'].required = False
self.group_role.fields.pop('group')
def is_valid(self):
valid = super(GroupForm, self).is_valid()
if not self.data.get('group_role-group') or not self.data.get('group_role-role') or not self.data.get('group_role-committee'):
return valid
return self.group_role.is_valid() and valid
def save(self, commit=True):
o = super(GroupForm, self).save(commit)
if self.data.get('group_role-committee') and self.data.get('group_role-role'):
self.group_role.instance.group = o
self.group_role.save()
return o
class GroupRoleForm(forms.ModelForm):
class Meta:
model = CommunityGroupRole
fields = (
'group',
'role',
'committee'
)
def __init__(self, community=None, *args, **kwargs):
super(GroupRoleForm, self).__init__(*args, **kwargs)
self.fields['group'].queryset = community.groups.all()
self.fields['role'].queryset = community.roles.all()
self.fields['committee'].queryset = community.committees.all()
|
eurosata1/e2 | refs/heads/master | lib/python/Components/About.py | 2 | # -*- coding: utf-8 -*-
from boxbranding import getBoxType
import struct, socket, fcntl, sys, os, time
from sys import modules
import os
import time
def getVersionString():
return getImageVersionString()
def getImageVersionString():
try:
if os.path.isfile('/var/lib/opkg/status'):
st = os.stat('/var/lib/opkg/status')
else:
st = os.stat('/usr/lib/ipkg/status')
tm = time.localtime(st.st_mtime)
if tm.tm_year >= 2011:
return time.strftime("%Y-%m-%d %H:%M:%S", tm)
except:
pass
return _("unavailable")
def getFlashDateString():
try:
return time.strftime(_("%Y-%m-%d %H:%M"), time.localtime(os.stat("/boot").st_ctime))
except:
return _("unknown")
def getEnigmaVersionString():
import enigma
enigma_version = enigma.getEnigmaVersionString()
if '-(no branch)' in enigma_version:
enigma_version = enigma_version [:-12]
return enigma_version
def getGStreamerVersionString():
import enigma
return enigma.getGStreamerVersionString()
def getKernelVersionString():
try:
f = open("/proc/version","r")
kernelversion = f.read().split(' ', 4)[2].split('-',2)[0]
f.close()
return kernelversion
except:
return _("unknown")
def getDriverInstalledDate():
try:
from glob import glob
driver = [x.split("-")[-2:-1][0][-8:] for x in open(glob("/var/lib/opkg/info/*-dvb-modules-*.control")[0], "r") if x.startswith("Version:")][0]
return "%s-%s-%s" % (driver[:4], driver[4:6], driver[6:])
except:
return _("unknown")
def getChipSetString():
if getBoxType() in ('dm7080','dm820'):
return "7435"
else:
try:
f = open('/proc/stb/info/chipset', 'r')
chipset = f.read()
f.close()
return str(chipset.lower().replace('\n','').replace('bcm','').replace('brcm','').replace('sti',''))
except:
return _("unavailable")
def getModelString():
try:
file = open("/proc/stb/info/boxtype", "r")
model = file.readline().strip()
file.close()
return model
except IOError:
return _("unknown")
def getPythonVersionString():
try:
import commands
status, output = commands.getstatusoutput("python -V")
return output.split(' ')[1]
except:
return _("unknown")
def getCPUString():
try:
file = open('/proc/cpuinfo', 'r')
lines = file.readlines()
for x in lines:
splitted = x.split(': ')
if len(splitted) > 1:
splitted[1] = splitted[1].replace('\n','')
if splitted[0].startswith("system type"):
system = splitted[1].split(' ')[0]
elif splitted[0].startswith("Processor"):
system = splitted[1].split(' ')[0]
file.close()
return system
except IOError:
return _("unavailable")
def getCPUSpeedString():
try:
file = open('/proc/cpuinfo', 'r')
lines = file.readlines()
for x in lines:
splitted = x.split(': ')
if len(splitted) > 1:
splitted[1] = splitted[1].replace('\n','')
if splitted[0].startswith("cpu MHz"):
speed = splitted[1].split('.')[0]
file.close()
return speed
except IOError:
return _("unavailable")
def getCpuCoresString():
	try:
		file = open('/proc/cpuinfo', 'r')
		lines = file.readlines()
		cores = 0
		for x in lines:
			splitted = x.split(': ')
			# each 'processor' entry in /proc/cpuinfo corresponds to one core
			if len(splitted) > 1 and splitted[0].startswith("processor"):
				cores += 1
		file.close()
		return cores
	except IOError:
		return _("unavailable")
def getCPUTempString():
try:
if os.path.isfile('/proc/stb/fp/temp_sensor_avs'):
temperature = open("/proc/stb/fp/temp_sensor_avs").readline().replace('\n','')
return _("%sยฐC") % temperature
except:
pass
return ""
def _ifinfo(sock, addr, ifname):
iface = struct.pack('256s', ifname[:15])
info = fcntl.ioctl(sock.fileno(), addr, iface)
if addr == 0x8927:
return ''.join(['%02x:' % ord(char) for char in info[18:24]])[:-1].upper()
else:
return socket.inet_ntoa(info[20:24])
def getIfConfig(ifname):
ifreq = {'ifname': ifname}
infos = {}
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# offsets defined in /usr/include/linux/sockios.h on linux 2.6
infos['addr'] = 0x8915 # SIOCGIFADDR
infos['brdaddr'] = 0x8919 # SIOCGIFBRDADDR
infos['hwaddr'] = 0x8927 # SIOCSIFHWADDR
infos['netmask'] = 0x891b # SIOCGIFNETMASK
	try:
		for k, v in infos.items():
			ifreq[k] = _ifinfo(sock, v, ifname)
	except:
		pass
sock.close()
return ifreq
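# Example (illustrative): getIfConfig('eth0') returns a dict along the lines
# of {'ifname': 'eth0', 'addr': '192.168.1.10', 'netmask': '255.255.255.0',
# 'brdaddr': '192.168.1.255', 'hwaddr': '00:11:22:33:44:55'}; lookups that
# fail leave the remaining keys out of the result.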
def getIfTransferredData(ifname):
f = open('/proc/net/dev', 'r')
for line in f:
if ifname in line:
data = line.split('%s:' % ifname)[1].split()
rx_bytes, tx_bytes = (data[0], data[8])
f.close()
return rx_bytes, tx_bytes
def getHardwareTypeString():
try:
if os.path.isfile("/proc/stb/info/boxtype"):
return open("/proc/stb/info/boxtype").read().strip().upper()
if os.path.isfile("/proc/stb/info/azmodel"):
return "AZBOX " + open("/proc/stb/info/azmodel").read().strip().upper() + "(" + open("/proc/stb/info/version").read().strip().upper() + ")"
if os.path.isfile("/proc/stb/info/vumodel"):
return "VU+" + open("/proc/stb/info/vumodel").read().strip().upper() + "(" + open("/proc/stb/info/version").read().strip().upper() + ")"
if os.path.isfile("/proc/stb/info/model"):
return open("/proc/stb/info/model").read().strip().upper()
except:
pass
return _("unavailable")
def getImageTypeString():
try:
return open("/etc/issue").readlines()[-2].capitalize().strip()[:-6]
except:
pass
return _("undefined")
# For modules that do "from About import about"
about = modules[__name__]
|
martint/airlift | refs/heads/master | launcher/src/main/scripts/bin/launcher.py | 12 | #!/usr/bin/env python
import errno
import os
import platform
import sys
import traceback
from fcntl import flock, LOCK_EX, LOCK_NB
from optparse import OptionParser
from os import O_RDWR, O_CREAT, O_WRONLY, O_APPEND
from os.path import basename, dirname, exists, realpath
from os.path import join as pathjoin
from signal import SIGTERM, SIGKILL
from stat import S_ISLNK
from time import sleep
COMMANDS = ['run', 'start', 'stop', 'restart', 'kill', 'status']
LSB_NOT_RUNNING = 3
LSB_STATUS_UNKNOWN = 4
def find_install_path(f):
"""Find canonical parent of bin/launcher.py"""
if basename(f) != 'launcher.py':
raise Exception("Expected file '%s' to be 'launcher.py' not '%s'" % (f, basename(f)))
p = realpath(dirname(f))
if basename(p) != 'bin':
raise Exception("Expected file '%s' directory to be 'bin' not '%s" % (f, basename(p)))
return dirname(p)
def makedirs(p):
"""Create directory and all intermediate ones"""
try:
os.makedirs(p)
except OSError, e:
if e.errno != errno.EEXIST:
raise
def load_properties(f):
"""Load key/value pairs from a file"""
properties = {}
for line in load_lines(f):
k, v = line.split('=', 1)
properties[k.strip()] = v.strip()
return properties
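# Example (illustrative): a properties file containing
#
#   node.environment=production
#   node.data-dir=/var/lib/app
#
# loads as {'node.environment': 'production', 'node.data-dir': '/var/lib/app'};
# blank lines and '#' comment lines are skipped by load_lines below.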
def load_lines(f):
"""Load lines from a file, ignoring blank or comment lines"""
lines = []
for line in file(f, 'r').readlines():
line = line.strip()
if len(line) > 0 and not line.startswith('#'):
lines.append(line)
return lines
def try_lock(f):
"""Try to open an exclusive lock (inheritable) on a file"""
try:
flock(f, LOCK_EX | LOCK_NB)
return True
except IOError:
return False
def open_read_write(f, mode):
"""Open file in read/write mode (without truncating it)"""
return os.fdopen(os.open(f, O_RDWR | O_CREAT, mode), 'r+')
class Process:
def __init__(self, path):
makedirs(dirname(path))
self.path = path
self.pid_file = open_read_write(path, 0600)
self.refresh()
def refresh(self):
self.locked = try_lock(self.pid_file)
def clear_pid(self):
assert self.locked, 'pid file not locked by us'
self.pid_file.seek(0)
self.pid_file.truncate()
def write_pid(self, pid):
self.clear_pid()
self.pid_file.write(str(pid) + '\n')
self.pid_file.flush()
def alive(self):
self.refresh()
if self.locked:
return False
pid = self.read_pid()
try:
os.kill(pid, 0)
return True
except OSError, e:
raise Exception('Signaling pid %s failed: %s' % (pid, e))
def read_pid(self):
assert not self.locked, 'pid file is locked by us'
self.pid_file.seek(0)
line = self.pid_file.readline().strip()
if len(line) == 0:
raise Exception("Pid file '%s' is empty" % self.path)
try:
pid = int(line)
except ValueError:
raise Exception("Pid file '%s' contains garbage: %s" % (self.path, line))
if pid <= 0:
raise Exception("Pid file '%s' contains an invalid pid: %s" % (self.path, pid))
return pid
def redirect_stdin_to_devnull():
"""Redirect stdin to /dev/null"""
fd = os.open(os.devnull, O_RDWR)
os.dup2(fd, sys.stdin.fileno())
os.close(fd)
def open_append(f):
"""Open a raw file descriptor in append mode"""
# noinspection PyTypeChecker
return os.open(f, O_WRONLY | O_APPEND | O_CREAT, 0644)
def redirect_output(fd):
"""Redirect stdout and stderr to a file descriptor"""
os.dup2(fd, sys.stdout.fileno())
os.dup2(fd, sys.stderr.fileno())
def symlink_exists(p):
"""Check if symlink exists and raise if another type of file exists"""
try:
st = os.lstat(p)
if not S_ISLNK(st.st_mode):
raise Exception('Path exists and is not a symlink: %s' % p)
return True
except OSError, e:
if e.errno != errno.ENOENT:
raise
return False
def create_symlink(source, target):
"""Create a symlink, removing the target first if it is a symlink"""
if symlink_exists(target):
os.remove(target)
if exists(source):
os.symlink(source, target)
def create_app_symlinks(options):
"""
Symlink the 'etc' and 'plugin' directory into the data directory.
This is needed to support programs that reference 'etc/xyz' from within
their config files: log.levels-file=etc/log.properties
"""
if options.install_path != options.data_dir:
create_symlink(
pathjoin(options.install_path, 'etc'),
pathjoin(options.data_dir, 'etc'))
create_symlink(
pathjoin(options.install_path, 'plugin'),
pathjoin(options.data_dir, 'plugin'))
def build_java_execution(options, daemon):
if not exists(options.config_path):
raise Exception('Config file is missing: %s' % options.config_path)
if not exists(options.jvm_config):
raise Exception('JVM config file is missing: %s' % options.jvm_config)
if not exists(options.launcher_config):
raise Exception('Launcher config file is missing: %s' % options.launcher_config)
if options.log_levels_set and not exists(options.log_levels):
raise Exception('Log levels file is missing: %s' % options.log_levels)
properties = options.properties.copy()
if exists(options.log_levels):
properties['log.levels-file'] = options.log_levels
if daemon:
properties['log.output-file'] = options.server_log
properties['log.enable-console'] = 'false'
jvm_properties = load_lines(options.jvm_config)
launcher_properties = load_properties(options.launcher_config)
try:
main_class = launcher_properties['main-class']
except KeyError:
raise Exception("Launcher config is missing 'main-class' property")
properties['config'] = options.config_path
system_properties = ['-D%s=%s' % i for i in properties.iteritems()]
classpath = pathjoin(options.install_path, 'lib', '*')
command = ['java', '-cp', classpath]
command += jvm_properties + system_properties
command += [main_class]
if options.verbose:
print command
print
env = os.environ.copy()
# set process name: https://github.com/electrum/procname
process_name = launcher_properties.get('process-name', '')
if len(process_name) > 0:
system = platform.system() + '-' + platform.machine()
if system == 'Linux-x86_64':
shim = pathjoin(options.install_path, 'bin', 'procname', system, 'libprocname.so')
env['LD_PRELOAD'] = (env.get('LD_PRELOAD', '') + ' ' + shim).strip()
env['PROCNAME'] = process_name
return command, env
def run(process, options):
if process.alive():
print 'Already running as %s' % process.read_pid()
return
create_app_symlinks(options)
args, env = build_java_execution(options, False)
makedirs(options.data_dir)
os.chdir(options.data_dir)
process.write_pid(os.getpid())
redirect_stdin_to_devnull()
os.execvpe(args[0], args, env)
def start(process, options):
if process.alive():
print 'Already running as %s' % process.read_pid()
return
create_app_symlinks(options)
args, env = build_java_execution(options, True)
makedirs(dirname(options.launcher_log))
log = open_append(options.launcher_log)
makedirs(options.data_dir)
os.chdir(options.data_dir)
pid = os.fork()
if pid > 0:
process.write_pid(pid)
print 'Started as %s' % pid
return
os.setsid()
redirect_stdin_to_devnull()
redirect_output(log)
os.close(log)
os.execvpe(args[0], args, env)
def terminate(process, signal, message):
if not process.alive():
print 'Not runnning'
return
pid = process.read_pid()
while True:
try:
os.kill(pid, signal)
except OSError, e:
if e.errno != errno.ESRCH:
raise Exception('Signaling pid %s failed: %s' % (pid, e))
if not process.alive():
process.clear_pid()
break
sleep(0.1)
print '%s %s' % (message, pid)
def stop(process):
terminate(process, SIGTERM, 'Stopped')
def kill(process):
terminate(process, SIGKILL, 'Killed')
def status(process):
if not process.alive():
print 'Not running'
sys.exit(LSB_NOT_RUNNING)
print 'Running as %s' % process.read_pid()
def handle_command(command, options):
process = Process(options.pid_file)
if command == 'run':
run(process, options)
elif command == 'start':
start(process, options)
elif command == 'stop':
stop(process)
elif command == 'restart':
stop(process)
start(process, options)
elif command == 'kill':
kill(process)
elif command == 'status':
status(process)
else:
raise AssertionError('Unhandled command: ' + command)
def create_parser():
commands = 'Commands: ' + ', '.join(COMMANDS)
parser = OptionParser(prog='launcher', usage='usage: %prog [options] command', description=commands)
parser.add_option('-v', '--verbose', action='store_true', default=False, help='Run verbosely')
parser.add_option('--launcher-config', metavar='FILE', help='Defaults to INSTALL_PATH/bin/launcher.properties')
parser.add_option('--node-config', metavar='FILE', help='Defaults to INSTALL_PATH/etc/node.properties')
parser.add_option('--jvm-config', metavar='FILE', help='Defaults to INSTALL_PATH/etc/jvm.config')
parser.add_option('--config', metavar='FILE', help='Defaults to INSTALL_PATH/etc/config.properties')
parser.add_option('--log-levels-file', metavar='FILE', help='Defaults to INSTALL_PATH/etc/log.properties')
parser.add_option('--data-dir', metavar='DIR', help='Defaults to INSTALL_PATH')
parser.add_option('--pid-file', metavar='FILE', help='Defaults to DATA_DIR/var/run/launcher.pid')
parser.add_option('--launcher-log-file', metavar='FILE', help='Defaults to DATA_DIR/var/log/launcher.log (only in daemon mode)')
parser.add_option('--server-log-file', metavar='FILE', help='Defaults to DATA_DIR/var/log/server.log (only in daemon mode)')
parser.add_option('-D', action='append', metavar='NAME=VALUE', dest='properties', help='Set a Java system property')
return parser
def parse_properties(parser, args):
properties = {}
for arg in args:
if '=' not in arg:
parser.error('property is malformed: %s' % arg)
key, value = [i.strip() for i in arg.split('=', 1)]
if key == 'config':
parser.error('cannot specify config using -D option (use --config)')
if key == 'log.output-file':
parser.error('cannot specify server log using -D option (use --server-log-file)')
if key == 'log.levels-file':
parser.error('cannot specify log levels using -D option (use --log-levels-file)')
properties[key] = value
return properties
def print_options(options):
if options.verbose:
for i in sorted(vars(options)):
print "%-15s = %s" % (i, getattr(options, i))
print
class Options:
pass
def main():
parser = create_parser()
(options, args) = parser.parse_args()
if len(args) != 1:
if len(args) == 0:
parser.error('command name not specified')
else:
parser.error('too many arguments')
command = args[0]
if command not in COMMANDS:
parser.error('unsupported command: %s' % command)
try:
install_path = find_install_path(sys.argv[0])
except Exception, e:
print 'ERROR: %s' % e
sys.exit(LSB_STATUS_UNKNOWN)
o = Options()
o.verbose = options.verbose
o.install_path = install_path
o.launcher_config = realpath(options.launcher_config or pathjoin(o.install_path, 'bin/launcher.properties'))
o.node_config = realpath(options.node_config or pathjoin(o.install_path, 'etc/node.properties'))
o.jvm_config = realpath(options.jvm_config or pathjoin(o.install_path, 'etc/jvm.config'))
o.config_path = realpath(options.config or pathjoin(o.install_path, 'etc/config.properties'))
o.log_levels = realpath(options.log_levels_file or pathjoin(o.install_path, 'etc/log.properties'))
o.log_levels_set = bool(options.log_levels_file)
if options.node_config and not exists(o.node_config):
parser.error('Node config file is missing: %s' % o.node_config)
node_properties = {}
if exists(o.node_config):
node_properties = load_properties(o.node_config)
data_dir = node_properties.get('node.data-dir')
o.data_dir = realpath(options.data_dir or data_dir or o.install_path)
o.pid_file = realpath(options.pid_file or pathjoin(o.data_dir, 'var/run/launcher.pid'))
o.launcher_log = realpath(options.launcher_log_file or pathjoin(o.data_dir, 'var/log/launcher.log'))
o.server_log = realpath(options.server_log_file or pathjoin(o.data_dir, 'var/log/server.log'))
o.properties = parse_properties(parser, options.properties or {})
for k, v in node_properties.iteritems():
if k not in o.properties:
o.properties[k] = v
if o.verbose:
print_options(o)
try:
handle_command(command, o)
except SystemExit:
raise
except Exception, e:
if o.verbose:
traceback.print_exc()
else:
print 'ERROR: %s' % e
sys.exit(LSB_STATUS_UNKNOWN)
if __name__ == '__main__':
main()
|
willingc/oh-mainline | refs/heads/master | mysite/search/management/__init__.py | 90 | # This file is part of OpenHatch.
# Copyright (C) 2010 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
agrista/odoo-saas | refs/heads/master | addons/l10n_be/wizard/__init__.py | 438 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import l10n_be_partner_vat_listing
import l10n_be_vat_intra
import l10n_be_account_vat_declaration
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
proxysh/Safejumper-for-Desktop | refs/heads/master | buildmac/Resources/env/lib/python2.7/site-packages/twisted/cred/test/test_simpleauth.py | 24 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for basic constructs of L{twisted.cred.credentials}.
"""
from __future__ import division, absolute_import
from twisted.trial.unittest import TestCase
from twisted.cred.credentials import UsernamePassword, IUsernamePassword
from twisted.cred.credentials import UsernameHashedPassword
from twisted.cred.credentials import IUsernameHashedPassword
class UsernamePasswordTests(TestCase):
"""
Tests for L{UsernamePassword}.
"""
def test_initialisation(self):
"""
The initialisation of L{UsernamePassword} will set C{username} and
C{password} on it.
"""
creds = UsernamePassword(b"foo", b"bar")
self.assertEqual(creds.username, b"foo")
self.assertEqual(creds.password, b"bar")
def test_correctPassword(self):
"""
Calling C{checkPassword} on a L{UsernamePassword} will return L{True}
when the password given is the password on the object.
"""
creds = UsernamePassword(b"user", b"pass")
self.assertTrue(creds.checkPassword(b"pass"))
def test_wrongPassword(self):
"""
Calling C{checkPassword} on a L{UsernamePassword} will return L{False}
when the password given is NOT the password on the object.
"""
creds = UsernamePassword(b"user", b"pass")
self.assertFalse(creds.checkPassword(b"someotherpass"))
def test_interface(self):
"""
L{UsernamePassword} implements L{IUsernamePassword}.
"""
self.assertTrue(IUsernamePassword.implementedBy(UsernamePassword))
class UsernameHashedPasswordTests(TestCase):
"""
Tests for L{UsernameHashedPassword}.
"""
def test_initialisation(self):
"""
The initialisation of L{UsernameHashedPassword} will set C{username}
and C{hashed} on it.
"""
creds = UsernameHashedPassword(b"foo", b"bar")
self.assertEqual(creds.username, b"foo")
self.assertEqual(creds.hashed, b"bar")
def test_correctPassword(self):
"""
Calling C{checkPassword} on a L{UsernameHashedPassword} will return
L{True} when the password given is the password on the object.
"""
creds = UsernameHashedPassword(b"user", b"pass")
self.assertTrue(creds.checkPassword(b"pass"))
def test_wrongPassword(self):
"""
Calling C{checkPassword} on a L{UsernameHashedPassword} will return
L{False} when the password given is NOT the password on the object.
"""
creds = UsernameHashedPassword(b"user", b"pass")
self.assertFalse(creds.checkPassword(b"someotherpass"))
def test_interface(self):
"""
L{UsernameHashedPassword} implements L{IUsernameHashedPassword}.
"""
self.assertTrue(
IUsernameHashedPassword.implementedBy(UsernameHashedPassword))
|
bryanph/OIPA | refs/heads/develop | OIPA/geodata/geojson_to_wkt.py | 4 | import StringIO
import tokenize
INVALID_WKT_FMT = 'Invalid WKT: `%s`'
def dump(obj, dest_file):
"""
Dump GeoJSON-like `dict` to WKT and write it to the `dest_file`.
:param dict obj:
    A GeoJSON-like dictionary. It must contain at least the keys 'type' and
'coordinates'.
:param dest_file:
Open and writable file-like object.
"""
dest_file.write(dumps(obj))
def load(source_file):
"""
Load a GeoJSON `dict` object from a ``source_file`` containing WKT.
:param source_file:
Open and readable file-like object.
:returns:
A GeoJSON `dict` representing the geometry read from the file.
"""
return loads(source_file.read())
def dumps(obj, decimals=16):
"""
Dump a GeoJSON-like `dict` to a WKT string.
"""
geom_type = obj['type']
exporter = __dumps_registry.get(geom_type)
if exporter is None:
__unsupported_geom_type(geom_type)
fmt = '%%.%df' % decimals
return exporter(obj, fmt)
def loads(string):
"""
Construct a GeoJSON `dict` from WKT (`string`).
"""
sio = StringIO.StringIO(string)
# NOTE: This is not the intended purpose of `tokenize`, but it works.
tokens = (x[1] for x in tokenize.generate_tokens(sio.readline))
tokens = __tokenize_wkt(tokens)
geom_type = tokens.next()
importer = __loads_registry.get(geom_type)
if importer is None:
__unsupported_geom_type(geom_type)
return importer(tokens, string)
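# Illustrative round trip (a sketch; the coordinates are made up, and dict key
# order in the `loads` result may differ under Python 2):
#   >>> dumps({'type': 'Point', 'coordinates': [0.0, 1.0]}, decimals=1)
#   'POINT (0.0 1.0)'
#   >>> loads('POINT (0.0 1.0)')
#   {'type': 'Point', 'coordinates': [0.0, 1.0]}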
def __tokenize_wkt(tokens):
"""
Since the tokenizer treats "-" and numeric strings as separate values,
combine them and yield them as a single token. This utility encapsulates
    parsing of negative numeric values from WKT and can be used generically in
    all parsers.
"""
negative = False
for t in tokens:
if t == '-':
negative = True
continue
else:
if negative:
yield '-%s' % t
else:
yield t
negative = False
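# For example (illustrative), the raw token stream ['(', '-', '1.5', ')'] is
# re-emitted by __tokenize_wkt as ['(', '-1.5', ')'].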
def __unsupported_geom_type(geom_type):
raise ValueError("Unsupported geometry type '%s'" % geom_type)
def __dump_point(obj, fmt):
"""
Dump a GeoJSON-like Point object to WKT.
:param dict obj:
A GeoJSON-like `dict` representing a Point.
:param str fmt:
Format string which indicates the number of digits to display after the
decimal point when formatting coordinates.
:returns:
WKT representation of the input GeoJSON Point ``obj``.
"""
coords = obj['coordinates']
pt = 'POINT (%s)' % ' '.join(fmt % c for c in coords)
return pt
def __dump_linestring(obj, fmt):
"""
Dump a GeoJSON-like LineString object to WKT.
Input parameters and return value are the LINESTRING equivalent to
:func:`__dump_point`.
"""
coords = obj['coordinates']
ls = 'LINESTRING (%s)'
ls %= ', '.join(' '.join(fmt % c for c in pt) for pt in coords)
return ls
def __dump_polygon(obj, fmt):
"""
Dump a GeoJSON-like Polygon object to WKT.
Input parameters and return value are the POLYGON equivalent to
:func:`__dump_point`.
"""
coords = obj['coordinates']
poly = 'POLYGON (%s)'
rings = (', '.join(' '.join(fmt % c for c in pt) for pt in ring)
for ring in coords)
rings = ('(%s)' % r for r in rings)
poly %= ', '.join(rings)
return poly
def __dump_multipoint(obj, fmt):
"""
Dump a GeoJSON-like MultiPoint object to WKT.
    Input parameters and return value are the MULTIPOINT equivalent to
:func:`__dump_point`.
"""
coords = obj['coordinates']
mp = 'MULTIPOINT (%s)'
points = (' '.join(fmt % c for c in pt) for pt in coords)
# Add parens around each point.
points = ('(%s)' % pt for pt in points)
mp %= ', '.join(points)
return mp
def __dump_multilinestring(obj, fmt):
raise NotImplementedError
def __dump_multipolygon(obj, fmt):
raise NotImplementedError
def __dump_geometrycollection(obj, fmt):
raise NotImplementedError
__dumps_registry = {
'Point': __dump_point,
'LineString': __dump_linestring,
'Polygon': __dump_polygon,
'MultiPoint': __dump_multipoint,
'MultiLineString': __dump_multilinestring,
'MultiPolygon': __dump_multipolygon,
'GeometryCollection': __dump_geometrycollection,
}
def __load_point(tokens, string):
"""
:param tokens:
        A generator of string tokens for the input WKT, beginning just after the
        geometry type. The geometry type is consumed before we get here. For
example, if :func:`loads` is called with the input 'POINT(0.0 1.0)',
``tokens`` would generate the following values:
.. code-block:: python
['(', '0.0', '1.0', ')']
:param str string:
The original WKT string.
:returns:
A GeoJSON `dict` Point representation of the WKT ``string``.
"""
if not tokens.next() == '(':
raise ValueError(INVALID_WKT_FMT % string)
coords = []
try:
for t in tokens:
if t == ')':
break
else:
coords.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='Point', coordinates=coords)
def __load_linestring(tokens, string):
"""
    Has similar inputs and return value to :func:`__load_point`, except it is
for handling LINESTRING geometry.
:returns:
A GeoJSON `dict` LineString representation of the WKT ``string``.
"""
if not tokens.next() == '(':
raise ValueError(INVALID_WKT_FMT % string)
# a list of lists
# each member list represents a point
coords = []
try:
pt = []
for t in tokens:
if t == ')':
coords.append(pt)
break
elif t == ',':
# it's the end of the point
coords.append(pt)
pt = []
else:
pt.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='LineString', coordinates=coords)
def __load_polygon(tokens, string):
"""
    Has similar inputs and return value to :func:`__load_point`, except it is
for handling POLYGON geometry.
:returns:
A GeoJSON `dict` Polygon representation of the WKT ``string``.
"""
open_parens = tokens.next(), tokens.next()
if not open_parens == ('(', '('):
raise ValueError(INVALID_WKT_FMT % string)
# coords contains a list of rings
# each ring contains a list of points
# each point is a list of 2-4 values
coords = []
ring = []
on_ring = True
try:
pt = []
for t in tokens:
if t == ')' and on_ring:
# The ring is finished
ring.append(pt)
coords.append(ring)
on_ring = False
elif t == ')' and not on_ring:
# it's the end of the polygon
break
elif t == '(':
# it's a new ring
ring = []
pt = []
on_ring = True
elif t == ',' and on_ring:
# it's the end of a point
ring.append(pt)
pt = []
elif t == ',' and not on_ring:
# there's another ring.
# do nothing
pass
else:
pt.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='Polygon', coordinates=coords)
def __load_multipoint(tokens, string):
"""
    Has similar inputs and return value to :func:`__load_point`, except it is
for handling MULTIPOINT geometry.
:returns:
A GeoJSON `dict` MultiPoint representation of the WKT ``string``.
"""
open_paren = tokens.next()
if not open_paren == '(':
raise ValueError(INVALID_WKT_FMT % string)
coords = []
pt = []
try:
for t in tokens:
if t in ('(', ')', ''):
# we can pretty much ignore parens and empty string tokens
pass
elif t == ',':
# the point is done
coords.append(pt)
pt = []
else:
pt.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
# Given the way we're parsing, we'll probably have to deal with the last
# point after the loop
if len(pt) > 0:
coords.append(pt)
return dict(type='MultiPoint', coordinates=coords)
def __load_multipolygon(tokens, string):
"""
    Has similar inputs and return value to :func:`__load_point`, except it is
for handling MULTIPOLYGON geometry.
:returns:
A GeoJSON `dict` MultiPolygon representation of the WKT ``string``.
"""
open_paren = tokens.next()
if not open_paren == '(':
raise ValueError(INVALID_WKT_FMT % string)
polygons = []
while True:
try:
poly = __load_polygon(tokens, string)
polygons.append(poly['coordinates'])
t = tokens.next()
if t == ')':
# we're done; no more polygons.
break
except StopIteration:
# If we reach this, the WKT is not valid.
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='MultiPolygon', coordinates=polygons)
def __load_multilinestring(tokens, string):
"""
    Has similar inputs and return value to :func:`__load_point`, except it is
for handling MULTILINESTRING geometry.
:returns:
A GeoJSON `dict` MultiLineString representation of the WKT ``string``.
"""
open_paren = tokens.next()
if not open_paren == '(':
raise ValueError(INVALID_WKT_FMT % string)
linestrs = []
while True:
try:
linestr = __load_linestring(tokens, string)
linestrs.append(linestr['coordinates'])
t = tokens.next()
if t == ')':
# we're done; no more linestrings.
break
except StopIteration:
# If we reach this, the WKT is not valid.
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='MultiLineString', coordinates=linestrs)
def __load_geometrycollection(tokens, string):
raise NotImplementedError
__loads_registry = {
'POINT': __load_point,
'LINESTRING': __load_linestring,
'POLYGON': __load_polygon,
'MULTIPOINT': __load_multipoint,
'MULTILINESTRING': __load_multilinestring,
'MULTIPOLYGON': __load_multipolygon,
'GEOMETRYCOLLECTION': __load_geometrycollection,
}
|
ycaihua/kbengine | refs/heads/master | kbe/src/lib/python/Tools/demo/markov.py | 107 | #!/usr/bin/env python3
"""
Markov chain simulation of words or characters.
"""
class Markov:
def __init__(self, histsize, choice):
self.histsize = histsize
self.choice = choice
self.trans = {}
def add(self, state, next):
self.trans.setdefault(state, []).append(next)
def put(self, seq):
n = self.histsize
add = self.add
add(None, seq[:0])
for i in range(len(seq)):
add(seq[max(0, i-n):i], seq[i:i+1])
add(seq[len(seq)-n:], None)
def get(self):
choice = self.choice
trans = self.trans
n = self.histsize
seq = choice(trans[None])
while True:
subseq = seq[max(0, len(seq)-n):]
options = trans[subseq]
next = choice(options)
if not next:
break
seq += next
return seq
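# Minimal usage sketch (not part of the original demo; the history size and
# training sequence below are arbitrary):
#   import random
#   m = Markov(histsize=2, choice=random.choice)
#   m.put(('the', 'quick', 'brown', 'fox'))
#   m.get()  # -> ('the', 'quick', 'brown', 'fox') for this single input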
def test():
import sys, random, getopt
args = sys.argv[1:]
try:
opts, args = getopt.getopt(args, '0123456789cdwq')
except getopt.error:
        print('Usage: %s [-#] [-cdqw] [file] ...' % sys.argv[0])
print('Options:')
print('-#: 1-digit history size (default 2)')
print('-c: characters (default)')
print('-w: words')
print('-d: more debugging output')
print('-q: no debugging output')
        print('Input files (default stdin) are split into paragraphs')
        print('separated by blank lines, and each paragraph is split')
        print('into words by whitespace, then reconcatenated with')
print('exactly one space separating words.')
print('Output consists of paragraphs separated by blank')
print('lines, where lines are no longer than 72 characters.')
sys.exit(2)
histsize = 2
do_words = False
debug = 1
for o, a in opts:
if '-0' <= o <= '-9': histsize = int(o[1:])
if o == '-c': do_words = False
if o == '-d': debug += 1
if o == '-q': debug = 0
if o == '-w': do_words = True
if not args:
args = ['-']
m = Markov(histsize, random.choice)
try:
for filename in args:
if filename == '-':
f = sys.stdin
if f.isatty():
print('Sorry, need stdin from file')
continue
else:
f = open(filename, 'r')
if debug: print('processing', filename, '...')
text = f.read()
f.close()
paralist = text.split('\n\n')
for para in paralist:
if debug > 1: print('feeding ...')
words = para.split()
if words:
if do_words:
data = tuple(words)
else:
data = ' '.join(words)
m.put(data)
except KeyboardInterrupt:
print('Interrupted -- continue with data read so far')
if not m.trans:
print('No valid input files')
return
if debug: print('done.')
if debug > 1:
for key in m.trans.keys():
if key is None or len(key) < histsize:
print(repr(key), m.trans[key])
if histsize == 0: print(repr(''), m.trans[''])
print()
while True:
data = m.get()
if do_words:
words = data
else:
words = data.split()
n = 0
limit = 72
for w in words:
if n + len(w) > limit:
print()
n = 0
print(w, end=' ')
n += len(w) + 1
print()
print()
if __name__ == "__main__":
test()
|
MTG/essentia | refs/heads/master | src/python/essentia/pytools/extractors/melspectrogram.py | 1 | # Copyright (C) 2006-2021 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import argparse
import os
import numpy as np
from essentia import Pool
from essentia import run
from essentia.streaming import (MonoLoader, FrameCutter, Windowing, Spectrum,
MelBands, UnaryOperator)
ZERO_PADDING = 0
WINDOW_TYPE = 'hann'
FRAME_SIZE = 1024
HOP_SIZE = 512
NUMBER_BANDS = 24
SAMPLE_RATE = 44100.
LOW_FREQUENCY_BOUND = 0.
HIGH_FREQUENCY_BOUND = 22050.
WARPING_FORMULA = 'htkMel'
WEIGHTING = 'warping'
NORMALIZE = 'unit_sum'
BANDS_TYPE = 'power'
COMPRESSION_TYPE = 'shift_scale_log'
def melspectrogram(filename, npy_file=None, force=False, verbose=False, sample_rate=SAMPLE_RATE, frame_size=FRAME_SIZE,
hop_size=HOP_SIZE, window_type=WINDOW_TYPE, zero_padding=ZERO_PADDING, low_frequency_bound=LOW_FREQUENCY_BOUND,
high_frequency_bound=HIGH_FREQUENCY_BOUND, number_bands=NUMBER_BANDS, warping_formula=WARPING_FORMULA,
weighting=WEIGHTING, normalize=NORMALIZE, bands_type=BANDS_TYPE, compression_type=COMPRESSION_TYPE):
"""Computes the mel spectrogram given the audio filename.
When the parameter `npy_file` is specified, the data is saved to disk as a numpy array (.npy).
Use the parameter `force` to overwrite the numpy array in case it already exists.
The rest of parameters are directly mapped to Essentia algorithms as explained below.
Note: this functionality is also available as a command line script.
Parameters:
sample_rate:
real โ (0,inf) (default = 44100)
the desired output sampling rate [Hz]
frame_size:
integer โ [1,inf) (default = 1024)
the output frame size
hop_size:
integer โ [1,inf) (default = 512)
the hop size between frames
window_type:
string โ {hamming,hann,hannnsgcq,triangular,square,blackmanharris62,blackmanharris70,blackmanharris74,blackmanharris92} (default = "hann")
the window type, which can be 'hamming', 'hann', 'triangular', 'square' or 'blackmanharrisXX'
zero_padding:
integer โ [0,inf) (default = 0)
the size of the zero-padding
low_frequency_bound:
real โ [0,inf) (default = 0)
a lower-bound limit for the frequencies to be included in the bands
high_frequency_bound:
real โ [0,inf) (default = 22050)
an upper-bound limit for the frequencies to be included in the bands
number_bands:
integer โ (1,inf) (default = 24)
the number of output bands
warping_formula:
string โ {slaneyMel,htkMel} (default = "htkMel")
The scale implementation type: 'htkMel' scale from the HTK toolkit [2, 3]
(default) or 'slaneyMel' scale from the Auditory toolbox [4]
weighting:
string โ {warping,linear} (default = "warping")
type of weighting function for determining triangle area
normalize:
string โ {unit_sum,unit_tri,unit_max} (default = "unit_sum")
spectrum bin weights to use for each mel band: 'unit_max' to make each mel
band vertex equal to 1, 'unit_sum' to make each mel band area equal to 1
        summing the actual weights of spectrum bins, 'unit_tri' to make each
triangle mel band area equal to 1 normalizing the weights of each triangle
by its bandwidth
bands_type:
string โ {magnitude,power} (default = "power")
'power' to output squared units, 'magnitude' to keep it as the input
compression_type:
string โ {dB,shift_scale_log,none} (default = "shift_scale_log")
the compression type to use.
'shift_scale_log' is log10(10000 * x + 1)
'dB' is 10 * log10(x)
Returns:
(2D array): The mel-spectrogram.
"""
padded_size = frame_size + zero_padding
spectrum_size = (padded_size) // 2 + 1
# In case we want to save the melbands to a file
# check if the file already exists
if npy_file:
if not npy_file.endswith('.npy'):
npy_file += '.npy'
if not force:
if os.path.exists(npy_file):
if verbose:
print('Skipping "{}"'.format(npy_file))
return
pool = Pool()
loader = MonoLoader(filename=filename,
sampleRate=sample_rate)
frameCutter = FrameCutter(frameSize=frame_size,
hopSize=hop_size)
w = Windowing(zeroPadding=zero_padding,
type=window_type,
normalized=False) # None of the mel bands extraction methods
# we have seen requires window-level normalization.
spec = Spectrum(size=padded_size)
mels = MelBands(inputSize=spectrum_size,
numberBands=number_bands,
sampleRate=sample_rate,
lowFrequencyBound=low_frequency_bound,
highFrequencyBound=high_frequency_bound,
warpingFormula=warping_formula,
weighting=weighting,
normalize=normalize,
type=bands_type,
log=False) # Do not compute any compression here.
# Use the `UnaryOperator`s methods before
# in case a new compression type is required.
if compression_type.lower() == 'db':
shift = UnaryOperator(type='identity')
compressor = UnaryOperator(type='lin2db')
elif compression_type.lower() == 'shift_scale_log':
shift = UnaryOperator(type='identity', scale=1e4, shift=1)
compressor = UnaryOperator(type='log10')
elif compression_type.lower() == 'none':
shift = UnaryOperator(type='identity')
compressor = UnaryOperator(type='identity')
loader.audio >> frameCutter.signal
frameCutter.frame >> w.frame >> spec.frame
spec.spectrum >> mels.spectrum
mels.bands >> shift.array >> compressor.array >> (pool, 'mel_bands')
run(loader)
mel_bands = np.array(pool['mel_bands'])
if npy_file:
np.save(npy_file, mel_bands)
if verbose:
print('Done for "{}"'.format(npy_file))
return mel_bands
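# Minimal usage sketch (assumptions: the import path below matches this file's
# location in the Essentia tree, and 'audio.wav' is a placeholder):
#   from essentia.pytools.extractors.melspectrogram import melspectrogram
#   bands = melspectrogram('audio.wav', npy_file='bands.npy', force=True)
#   # bands.shape == (n_frames, NUMBER_BANDS) with the default parameters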
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Computes the mel spectrogram of a given audio file.')
parser.add_argument('filename',
help='the name of the file from which to read')
parser.add_argument('npy_file', type=str,
help='the name of the output file')
parser.add_argument('--force', '-f', action='store_true',
help='whether to recompute if the output file already exists')
parser.add_argument('--verbose', '-v', action='store_true',
help='whether to print out status to the standard output')
parser.add_argument('--sample-rate', '-sr', type=float, default=SAMPLE_RATE,
help='the sample rate')
parser.add_argument('--frame-size', '-fs', type=int, default=FRAME_SIZE,
help='the output frame size')
parser.add_argument('--hop-size', '-hs', type=int, default=HOP_SIZE,
help='the hop size between frames')
parser.add_argument('--window-type', '-wt', type=str, default=WINDOW_TYPE,
help='window type', choices=('hamming', 'hann', 'hannnsgcq', 'triangular', 'square', 'blackmanharris62',
'blackmanharris70', 'blackmanharris74', 'blackmanharris92'))
parser.add_argument('--zero-padding', '-zp', type=int, default=ZERO_PADDING,
help='the size of the zero-padding')
parser.add_argument('--low-frequency-bound', '-lf', type=float, default=LOW_FREQUENCY_BOUND,
help='a lower-bound limit for the frequencies to be included in the bands')
parser.add_argument('--high-frequency-bound', '-hf', type=float, default=HIGH_FREQUENCY_BOUND,
help='an upper-bound limit for the frequencies to be included in the bands')
parser.add_argument('--number-bands', '-nb', type=int, default=NUMBER_BANDS,
help='the number of output bands')
parser.add_argument('--warping-formula', '-wf', type=str, default=WARPING_FORMULA, choices=('slaneyMel','htkMel'),
help='the scale implementation type: `htkMel` scale from the HTK toolkit(default) or `slaneyMel` scale from the Auditory toolbox')
parser.add_argument('--weighting', '-we', type=str, default=WEIGHTING, choices=('warping','linear'),
help='type of weighting function for determining triangle area')
parser.add_argument('--normalize', '-n', type=str, default=NORMALIZE, choices=('unit_sum', 'unit_tri', 'unit_max'),
                        help='spectrum bin weights to use for each mel band: `unit_max` to make each mel band vertex equal to 1, `unit_sum` to make each mel band area equal to 1 summing the actual weights of spectrum bins, `unit_tri` to make each triangle mel band area equal to 1 normalizing the weights of each triangle by its bandwidth')
parser.add_argument('--bands-type', '-bt', type=str, default=BANDS_TYPE, choices=('magnitude','power'),
help='`power` to output squared units, `magnitude` to keep it as the input')
parser.add_argument('--compression-type', '-ct', type=str, default=COMPRESSION_TYPE, choices=('dB', 'shift_scale_log', 'none'),
help='dB: 10log10(x). shift_scale_log: log(1 + 10000 * x)')
melspectrogram(**vars(parser.parse_args()))
|
raajitr/django_hangman | refs/heads/master | env/lib/python2.7/site-packages/django/contrib/gis/utils/__init__.py | 8 | """
This module contains useful utilities for GeoDjango.
"""
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.utils.wkt import precision_wkt # NOQA
from django.core.exceptions import ImproperlyConfigured
if HAS_GDAL:
from django.contrib.gis.utils.ogrinfo import ogrinfo # NOQA
from django.contrib.gis.utils.ogrinspect import mapping, ogrinspect # NOQA
from django.contrib.gis.utils.srs import add_srs_entry # NOQA
try:
# LayerMapping requires DJANGO_SETTINGS_MODULE to be set,
# so this needs to be in try/except.
from django.contrib.gis.utils.layermapping import LayerMapping, LayerMapError # NOQA
except ImproperlyConfigured:
pass
|
pignacio/var-log-recetas | refs/heads/master | var_log_recetas/recipe/models.py | 1 | from django.db import models
from django.utils.translation import ugettext as _
# Create your models here.
class MeasuredIngredient(models.Model):
ingredient = models.ForeignKey('ingredient.Ingredient')
unit = models.ForeignKey('ingredient.MeasureUnit')
amount = models.FloatField(_('Amount'))
subrecipe = models.ForeignKey('SubRecipe')
def __unicode__(self):
return "{:2f} {} of {}".format(self.amount, self.unit.name,
self.ingredient.name)
class Step(models.Model):
subrecipe = models.ForeignKey('SubRecipe')
text = models.TextField()
position = models.PositiveIntegerField()
def save(self, *args, **kwargs):
if self.position is None:
max_position = self.subrecipe.step_set.aggregate(
models.Max('position'))['position__max'] or 0
self.position = max_position + 1
super(Step, self).save(*args, **kwargs)
def __unicode__(self):
return "Step #{} of {}: {}".format(self.position, self.subrecipe,
self.text)
class Meta:
ordering = ('position',)
class Recipe(models.Model):
title = models.CharField(max_length=200)
tags = models.ManyToManyField('Tags', blank=True)
def as_json(self):
output = {}
output['title'] = self.title
output['subrecipes'] = [sr.as_json()
for sr in self.subrecipe_set.all()]
return output
def __unicode__(self):
return self.title
class SubRecipe(models.Model):
recipe = models.ForeignKey(Recipe)
position = models.PositiveIntegerField()
title = models.CharField(max_length=200)
def save(self, *args, **kwargs):
if self.position is None:
max_position = self.recipe.subrecipe_set.aggregate(
models.Max('position'))['position__max'] or 0
self.position = max_position + 1
super(SubRecipe, self).save(*args, **kwargs)
def as_json(self):
output = {}
output['title'] = self.title
output['ingredients'] = [{
'amount': i.amount,
'unit': i.unit.name,
'ingredient': i.ingredient.name,
} for i in self.measuredingredient_set.all()]
output['steps'] = [s.text for s in self.step_set.all()]
return output
def __unicode__(self):
return "{s.recipe} part #{s.position}: {s.title}".format(s=self)
class Tags(models.Model):
name = models.CharField(max_length=200, unique=True)
def __unicode__(self):
return "#" + self.name
|
amunk/metagoofil | refs/heads/master | hachoir_parser/archive/ace.py | 95 | """
ACE parser
From wotsit.org and the SDK header (bitflags)
Partial study of a new block type (5) I've called "new_recovery", as its
syntax is very close to the former one (of type 2).
Status: can only fully read file and header blocks.
Author: Christophe Gisquet <christophe.gisquet@free.fr>
Creation date: 19 january 2006
"""
from hachoir_parser import Parser
from hachoir_core.field import (StaticFieldSet, FieldSet,
Bit, Bits, NullBits, RawBytes, Enum,
UInt8, UInt16, UInt32,
PascalString8, PascalString16, String,
TimeDateMSDOS32)
from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal
from hachoir_core.endian import LITTLE_ENDIAN
from hachoir_parser.common.msdos import MSDOSFileAttr32
MAGIC = "**ACE**"
OS_MSDOS = 0
OS_WIN32 = 2
HOST_OS = {
0: "MS-DOS",
1: "OS/2",
2: "Win32",
3: "Unix",
4: "MAC-OS",
5: "Win NT",
6: "Primos",
7: "APPLE GS",
8: "ATARI",
9: "VAX VMS",
10: "AMIGA",
11: "NEXT",
}
COMPRESSION_TYPE = {
0: "Store",
1: "Lempel-Ziv 77",
2: "ACE v2.0",
}
COMPRESSION_MODE = {
0: "fastest",
1: "fast",
2: "normal",
3: "good",
4: "best",
}
# TODO: Computing the CRC16 would also prove useful
#def markerValidate(self):
# return not self["extend"].value and self["signature"].value == MAGIC and \
# self["host_os"].value<12
class MarkerFlags(StaticFieldSet):
format = (
(Bit, "extend", "Whether the header is extended"),
(Bit, "has_comment", "Whether the archive has a comment"),
(NullBits, "unused", 7, "Reserved bits"),
(Bit, "sfx", "SFX"),
(Bit, "limited_dict", "Junior SFX with 256K dictionary"),
(Bit, "multi_volume", "Part of a set of ACE archives"),
(Bit, "has_av_string", "This header holds an AV-string"),
(Bit, "recovery_record", "Recovery record preset"),
(Bit, "locked", "Archive is locked"),
(Bit, "solid", "Archive uses solid compression")
)
def markerFlags(self):
yield MarkerFlags(self, "flags", "Marker flags")
def markerHeader(self):
yield String(self, "signature", 7, "Signature")
yield UInt8(self, "ver_extract", "Version needed to extract archive")
yield UInt8(self, "ver_created", "Version used to create archive")
yield Enum(UInt8(self, "host_os", "OS where the files were compressed"), HOST_OS)
yield UInt8(self, "vol_num", "Volume number")
yield TimeDateMSDOS32(self, "time", "Date and time (MS DOS format)")
yield Bits(self, "reserved", 64, "Reserved size for future extensions")
flags = self["flags"]
if flags["has_av_string"].value:
yield PascalString8(self, "av_string", "AV String")
if flags["has_comment"].value:
size = filesizeHandler(UInt16(self, "comment_size", "Comment size"))
yield size
if size.value > 0:
yield RawBytes(self, "compressed_comment", size.value, \
"Compressed comment")
class FileFlags(StaticFieldSet):
format = (
(Bit, "extend", "Whether the header is extended"),
(Bit, "has_comment", "Presence of file comment"),
(Bits, "unused", 10, "Unused bit flags"),
(Bit, "encrypted", "File encrypted with password"),
(Bit, "previous", "File continued from previous volume"),
(Bit, "next", "File continues on the next volume"),
(Bit, "solid", "File compressed using previously archived files")
)
def fileFlags(self):
yield FileFlags(self, "flags", "File flags")
def fileHeader(self):
yield filesizeHandler(UInt32(self, "compressed_size", "Size of the compressed file"))
yield filesizeHandler(UInt32(self, "uncompressed_size", "Uncompressed file size"))
yield TimeDateMSDOS32(self, "ftime", "Date and time (MS DOS format)")
if self["/header/host_os"].value in (OS_MSDOS, OS_WIN32):
yield MSDOSFileAttr32(self, "file_attr", "File attributes")
else:
yield textHandler(UInt32(self, "file_attr", "File attributes"), hexadecimal)
yield textHandler(UInt32(self, "file_crc32", "CRC32 checksum over the compressed file)"), hexadecimal)
yield Enum(UInt8(self, "compression_type", "Type of compression"), COMPRESSION_TYPE)
yield Enum(UInt8(self, "compression_mode", "Quality of compression"), COMPRESSION_MODE)
yield textHandler(UInt16(self, "parameters", "Compression parameters"), hexadecimal)
yield textHandler(UInt16(self, "reserved", "Reserved data"), hexadecimal)
# Filename
yield PascalString16(self, "filename", "Filename")
# Comment
if self["flags/has_comment"].value:
yield filesizeHandler(UInt16(self, "comment_size", "Size of the compressed comment"))
if self["comment_size"].value > 0:
yield RawBytes(self, "comment_data", self["comment_size"].value, "Comment data")
def fileBody(self):
size = self["compressed_size"].value
if size > 0:
yield RawBytes(self, "compressed_data", size, "Compressed data")
def fileDesc(self):
return "File entry: %s (%s)" % (self["filename"].value, self["compressed_size"].display)
def recoveryHeader(self):
yield filesizeHandler(UInt32(self, "rec_blk_size", "Size of recovery data"))
self.body_size = self["rec_blk_size"].size
yield String(self, "signature", 7, "Signature, normally '**ACE**'")
yield textHandler(UInt32(self, "relative_start",
"Relative start (to this block) of the data this block is mode of"),
hexadecimal)
yield UInt32(self, "num_blocks", "Number of blocks the data is split into")
yield UInt32(self, "size_blocks", "Size of these blocks")
yield UInt16(self, "crc16_blocks", "CRC16 over recovery data")
# size_blocks blocks of size size_blocks follow
# The ultimate data is the xor data of all those blocks
size = self["size_blocks"].value
for index in xrange(self["num_blocks"].value):
yield RawBytes(self, "data[]", size, "Recovery block %i" % index)
yield RawBytes(self, "xor_data", size, "The XOR value of the above data blocks")
def recoveryDesc(self):
return "Recovery block, size=%u" % self["body_size"].display
def newRecoveryHeader(self):
"""
This header is described nowhere
"""
if self["flags/extend"].value:
yield filesizeHandler(UInt32(self, "body_size", "Size of the unknown body following"))
self.body_size = self["body_size"].value
yield textHandler(UInt32(self, "unknown[]", "Unknown field, probably 0"),
hexadecimal)
yield String(self, "signature", 7, "Signature, normally '**ACE**'")
yield textHandler(UInt32(self, "relative_start",
"Offset (=crc16's) of this block in the file"), hexadecimal)
yield textHandler(UInt32(self, "unknown[]",
"Unknown field, probably 0"), hexadecimal)
class BaseFlags(StaticFieldSet):
format = (
(Bit, "extend", "Whether the header is extended"),
(NullBits, "unused", 15, "Unused bit flags")
)
def parseFlags(self):
yield BaseFlags(self, "flags", "Unknown flags")
def parseHeader(self):
if self["flags/extend"].value:
yield filesizeHandler(UInt32(self, "body_size", "Size of the unknown body following"))
self.body_size = self["body_size"].value
def parseBody(self):
if self.body_size > 0:
yield RawBytes(self, "body_data", self.body_size, "Body data, unhandled")
class Block(FieldSet):
TAG_INFO = {
0: ("header", "Archiver header", markerFlags, markerHeader, None),
1: ("file[]", fileDesc, fileFlags, fileHeader, fileBody),
2: ("recovery[]", recoveryDesc, recoveryHeader, None, None),
5: ("new_recovery[]", None, None, newRecoveryHeader, None)
}
def __init__(self, parent, name, description=None):
FieldSet.__init__(self, parent, name, description)
self.body_size = 0
self.desc_func = None
type = self["block_type"].value
if type in self.TAG_INFO:
self._name, desc, self.parseFlags, self.parseHeader, self.parseBody = self.TAG_INFO[type]
if desc:
if isinstance(desc, str):
self._description = desc
else:
self.desc_func = desc
else:
            self.warning("Processing as unknown block of type %u" % type)
if not self.parseFlags:
self.parseFlags = parseFlags
if not self.parseHeader:
self.parseHeader = parseHeader
if not self.parseBody:
self.parseBody = parseBody
def createFields(self):
yield textHandler(UInt16(self, "crc16", "Archive CRC16 (from byte 4 on)"), hexadecimal)
yield filesizeHandler(UInt16(self, "head_size", "Block size (from byte 4 on)"))
yield UInt8(self, "block_type", "Block type")
# Flags
for flag in self.parseFlags(self):
yield flag
# Rest of the header
for field in self.parseHeader(self):
yield field
size = self["head_size"].value - (self.current_size//8) + (2+2)
if size > 0:
yield RawBytes(self, "extra_data", size, "Extra header data, unhandled")
# Body in itself
for field in self.parseBody(self):
yield field
def createDescription(self):
if self.desc_func:
return self.desc_func(self)
else:
return "Block: %s" % self["type"].display
class AceFile(Parser):
endian = LITTLE_ENDIAN
PARSER_TAGS = {
"id": "ace",
"category": "archive",
"file_ext": ("ace",),
"mime": (u"application/x-ace-compressed",),
"min_size": 50*8,
"description": "ACE archive"
}
def validate(self):
if self.stream.readBytes(7*8, len(MAGIC)) != MAGIC:
return "Invalid magic"
return True
def createFields(self):
while not self.eof:
yield Block(self, "block[]")
|
singe/sslstrip2 | refs/heads/master | sslstrip/SSLServerConnection.py | 32 | # Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging, re, string
from ServerConnection import ServerConnection
class SSLServerConnection(ServerConnection):
'''
For SSL connections to a server, we need to do some additional stripping. First we need
to make note of any relative links, as the server will be expecting those to be requested
via SSL as well. We also want to slip our favicon in here and kill the secure bit on cookies.
'''
cookieExpression = re.compile(r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE)
cssExpression = re.compile(r"url\(([\w\d:#@%/;$~_?\+-=\\\.&]+)\)", re.IGNORECASE)
iconExpression = re.compile(r"<link rel=\"shortcut icon\" .*href=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE)
linkExpression = re.compile(r"<((a)|(link)|(img)|(script)|(frame)) .*((href)|(src))=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE)
headExpression = re.compile(r"<head>", re.IGNORECASE)
def __init__(self, command, uri, postData, headers, client):
ServerConnection.__init__(self, command, uri, postData, headers, client)
def getLogLevel(self):
return logging.INFO
def getPostPrefix(self):
return "SECURE POST"
def handleHeader(self, key, value):
if (key.lower() == 'set-cookie'):
                newvalues = []
value = SSLServerConnection.cookieExpression.sub("\g<1>", value)
values = value.split(';')
for v in values:
if v[:7].lower()==' domain':
dominio=v.split("=")[1]
logging.debug("LEO Parsing cookie domain parameter: %s"%v)
real = self.urlMonitor.sustitucion
if dominio in real:
v=" Domain=%s"%real[dominio]
logging.debug("LEO New cookie domain parameter: %s"%v)
newvalues.append(v)
value = ';'.join(newvalues)
if (key.lower() == 'access-control-allow-origin'):
value='*'
ServerConnection.handleHeader(self, key, value)
def stripFileFromPath(self, path):
(strippedPath, lastSlash, file) = path.rpartition('/')
return strippedPath
def buildAbsoluteLink(self, link):
absoluteLink = ""
if ((not link.startswith('http')) and (not link.startswith('/'))):
absoluteLink = "http://"+self.headers['host']+self.stripFileFromPath(self.uri)+'/'+link
logging.debug("Found path-relative link in secure transmission: " + link)
logging.debug("New Absolute path-relative link: " + absoluteLink)
elif not link.startswith('http'):
absoluteLink = "http://"+self.headers['host']+link
logging.debug("Found relative link in secure transmission: " + link)
logging.debug("New Absolute link: " + absoluteLink)
if not absoluteLink == "":
            absoluteLink = absoluteLink.replace('&amp;', '&')
self.urlMonitor.addSecureLink(self.client.getClientIP(), absoluteLink);
def replaceCssLinks(self, data):
iterator = re.finditer(SSLServerConnection.cssExpression, data)
for match in iterator:
self.buildAbsoluteLink(match.group(1))
return data
def replaceFavicon(self, data):
match = re.search(SSLServerConnection.iconExpression, data)
if (match != None):
data = re.sub(SSLServerConnection.iconExpression,
"<link rel=\"SHORTCUT ICON\" href=\"/favicon-x-favicon-x.ico\">", data)
else:
data = re.sub(SSLServerConnection.headExpression,
"<head><link rel=\"SHORTCUT ICON\" href=\"/favicon-x-favicon-x.ico\">", data)
return data
def replaceSecureLinks(self, data):
data = ServerConnection.replaceSecureLinks(self, data)
data = self.replaceCssLinks(data)
if (self.urlMonitor.isFaviconSpoofing()):
data = self.replaceFavicon(data)
iterator = re.finditer(SSLServerConnection.linkExpression, data)
for match in iterator:
self.buildAbsoluteLink(match.group(10))
return data
|
DansGit/qnatool | refs/heads/master | analyze/parsedatetime/tests/TestRanges.py | 3 |
"""
Test parsing of simple date and times
"""
import unittest, time, datetime
import parsedatetime as pdt
# a special compare function is used to allow us to ignore the seconds as
# the running of the test could cross a minute boundary
def _compareResults(result, check):
targetStart, targetEnd, t_flag = result
valueStart, valueEnd, v_flag = check
t1_yr, t1_mth, t1_dy, t1_hr, t1_min, _, _, _, _ = targetStart
v1_yr, v1_mth, v1_dy, v1_hr, v1_min, _, _, _, _ = valueStart
t2_yr, t2_mth, t2_dy, t2_hr, t2_min, _, _, _, _ = targetEnd
v2_yr, v2_mth, v2_dy, v2_hr, v2_min, _, _, _, _ = valueEnd
return ((t1_yr == v1_yr) and (t1_mth == v1_mth) and (t1_dy == v1_dy) and (t1_hr == v1_hr) and
(t1_min == v1_min) and (t2_yr == v2_yr) and (t2_mth == v2_mth) and (t2_dy == v2_dy) and
(t2_hr == v2_hr) and (t2_min == v2_min) and (t_flag == v_flag))
class test(unittest.TestCase):
def setUp(self):
self.cal = pdt.Calendar()
self.yr, self.mth, self.dy, self.hr, self.mn, self.sec, self.wd, self.yd, self.isdst = time.localtime()
def testTimes(self):
start = datetime.datetime(self.yr, self.mth, self.dy, self.hr, self.mn, self.sec).timetuple()
targetStart = datetime.datetime(self.yr, self.mth, self.dy, 14, 0, 0).timetuple()
targetEnd = datetime.datetime(self.yr, self.mth, self.dy, 17, 30, 0).timetuple()
self.assertTrue(_compareResults(self.cal.evalRanges("2 pm - 5:30 pm", start), (targetStart, targetEnd, 2)))
self.assertTrue(_compareResults(self.cal.evalRanges("2pm - 5:30pm", start), (targetStart, targetEnd, 2)))
self.assertTrue(_compareResults(self.cal.evalRanges("2:00:00 pm - 5:30:00 pm", start), (targetStart, targetEnd, 2)))
self.assertTrue(_compareResults(self.cal.evalRanges("2 - 5:30pm", start), (targetStart, targetEnd, 2)))
self.assertTrue(_compareResults(self.cal.evalRanges("14:00 - 17:30", start), (targetStart, targetEnd, 2)))
targetStart = datetime.datetime(self.yr, self.mth, self.dy, 10, 0, 0).timetuple()
targetEnd = datetime.datetime(self.yr, self.mth, self.dy, 13, 30, 0).timetuple()
self.assertTrue(_compareResults(self.cal.evalRanges("10AM - 1:30PM", start), (targetStart, targetEnd, 2)))
self.assertTrue(_compareResults(self.cal.evalRanges("10:00:00 am - 1:30:00 pm", start), (targetStart, targetEnd, 2)))
self.assertTrue(_compareResults(self.cal.evalRanges("10:00 - 13:30", start), (targetStart, targetEnd, 2)))
targetStart = datetime.datetime(self.yr, self.mth, self.dy, 15, 30, 0).timetuple()
targetEnd = datetime.datetime(self.yr, self.mth, self.dy, 17, 0, 0).timetuple()
self.assertTrue(_compareResults(self.cal.evalRanges("today 3:30-5PM", start), (targetStart, targetEnd, 2)))
def testDates(self):
start = datetime.datetime(self.yr, self.mth, self.dy, self.hr, self.mn, self.sec).timetuple()
targetStart = datetime.datetime(2006, 8, 29, self.hr, self.mn, self.sec).timetuple()
targetEnd = datetime.datetime(2006, 9, 2,self.hr, self.mn, self.sec).timetuple()
self.assertTrue(_compareResults(self.cal.evalRanges("August 29, 2006 - September 2, 2006", start), (targetStart, targetEnd, 1)))
self.assertTrue(_compareResults(self.cal.evalRanges("August 29 - September 2, 2006", start), (targetStart, targetEnd, 1)))
targetStart = datetime.datetime(2006, 8, 29, self.hr, self.mn, self.sec).timetuple()
targetEnd = datetime.datetime(2006, 9, 2, self.hr, self.mn, self.sec).timetuple()
self.assertTrue(_compareResults(self.cal.evalRanges("08/29/06 - 09/02/06", start), (targetStart, targetEnd, 1)))
#def testSubRanges(self):
# start = datetime.datetime(self.yr, self.mth, self.dy, self.hr, self.mn, self.sec).timetuple()
# targetStart = datetime.datetime(2006, 8, 1, 9, 0, 0).timetuple()
# targetEnd = datetime.datetime(2006, 8, 15, 9, 0, 0).timetuple()
# self.assertTrue(_compareResults(self.cal.evalRanges("August 1-15, 2006", start), (targetStart, targetEnd, 1)))
if __name__ == "__main__":
unittest.main()
|
turdusmerula/kipartman | refs/heads/master | kipartman/frames/project_frame.py | 1 | from dialogs.dialog_project import DialogProject
from frames.buy_frame import BuyFrame
from frames.bom_frame import BomFrame
from frames.schematic_frame import SchematicFrame
from frames.configuration_frame import ConfigurationFrame
from helper.exception import print_stack
import helper.tree
import os
import wx
from kicad.kicad_project import KicadProject
class DataModelFilePath(helper.tree.TreeContainerItem):
def __init__(self, path):
super(DataModelFilePath, self).__init__()
self.path = path
def GetValue(self, col):
vMap = {
0 : os.path.basename(self.path),
}
return vMap[col]
def GetAttr(self, col, attr):
attr.Bold = True
return True
class DataModelFile(helper.tree.TreeContainerItem):
def __init__(self, path, name):
super(DataModelFile, self).__init__()
self.path = os.path.join(path, name)
self.name = name
def GetValue(self, col):
vMap = {
0 : self.name,
}
return vMap[col]
def GetAttr(self, col, attr):
attr.Bold = False
return True
class TreeManagerFiles(helper.tree.TreeManager):
def __init__(self, tree_view, *args, **kwargs):
super(TreeManagerFiles, self).__init__(tree_view, *args, **kwargs)
def FindPath(self, path):
for data in self.data:
if isinstance(data, DataModelFilePath) and data.path==os.path.normpath(path):
return data
return None
def FindFile(self, path, name):
pathobj = self.FindPath(path)
for data in self.data:
if isinstance(data, DataModelFile) and data.name==name and data.parent==pathobj:
return data
return None
def AppendPath(self, path):
pathobj = self.FindPath(path)
if pathobj:
return pathobj
pathobj = DataModelFilePath(path)
parentpath = self.FindPath(os.path.dirname(path))
self.AppendItem(parentpath, pathobj)
return pathobj
def AppendFile(self, path, name):
fileobj = self.FindFile(path, name)
if fileobj:
return fileobj
pathobj = self.FindPath(path)
fileobj = DataModelFile(path, name)
self.AppendItem(pathobj, fileobj)
class ProjectFrame(DialogProject):
def __init__(self, parent, project_path):
DialogProject.__init__(self, parent)
self.project_path = project_path
self.menus = self.menu_bar.GetMenus()
self.kicad_project = KicadProject(self.project_path)
self.kicad_project.on_change_hook = self.onProjectFileChanged
# create libraries data
self.tree_project_manager = TreeManagerFiles(self.tree_project, context_menu=self.menu_project)
self.tree_project_manager.AddTextColumn("name")
self.tree_project_manager.OnSelectionChanged = self.onTreeProjectSelChanged
self.tree_project_manager.OnItemBeforeContextMenu = self.onTreeProjectBeforeContextMenu
self.load()
self.pages = []
def onMenuViewConfigurationSelection( self, event ):
ConfigurationFrame(self).ShowModal()
def OnMenuItem( self, event ):
self.pages[self.notebook.GetSelection()].OnMenuItem(event)
def onProjectFileChanged(self, path):
self.kicad_project.Enabled(False)
        # resynchronize when a file changes on disk
self.load()
# reload pages
for page in self.pages:
if path.endswith(".bom") and isinstance(page, BomFrame):
page.reload()
elif path.endswith(".sch") and isinstance(page, SchematicFrame):
page.reload()
self.kicad_project.Enabled(True)
def load(self):
try:
self.loadFiles()
except Exception as e:
print_stack()
wx.MessageBox(format(e), 'Project load failed', wx.OK | wx.ICON_ERROR)
def loadFiles(self):
self.kicad_project.Load()
self.tree_project_manager.SaveState()
# load libraries tree
for file_path in self.kicad_project.files:
# decompose path
folders = []
file_name = os.path.basename(file_path)
path = os.path.dirname(file_path)
while path!='' and path!='/':
folders.insert(0, path)
path = os.path.dirname(path)
file_path = self.kicad_project.root_path
for folder in folders:
file_path = os.path.join(self.kicad_project.root_path, folder)
pathobj = self.tree_project_manager.FindPath(folder)
if self.tree_project_manager.DropStateObject(pathobj)==False:
self.tree_project_manager.AppendPath(folder)
fileobj = self.tree_project_manager.FindFile(file_path, file_name)
if self.tree_project_manager.DropStateObject(fileobj)==False:
self.tree_project_manager.AppendFile(file_path, file_name)
self.tree_project_manager.PurgeState()
def onTreeProjectSelChanged( self, event ):
item = self.tree_project.GetSelection()
if item.IsOk()==False:
return
pathobj = self.tree_project_manager.ItemToObject(item)
def onTreeProjectBeforeContextMenu( self, event ):
item = self.tree_project.GetSelection()
if item.IsOk()==False:
return
obj = self.tree_project_manager.ItemToObject(item)
def onButtonRefreshProjectClick( self, event ):
self.load()
def onMenuProjectOpenSelection( self, event ):
item = self.tree_project.GetSelection()
if item.IsOk()==False:
return
obj = self.tree_project_manager.ItemToObject(item)
if isinstance(obj, DataModelFile):
# if obj.name.endswith(".bom"):
# bom = BomFrame(self.notebook)
# self.pages.append(bom)
# self.notebook.AddPage(bom, obj.path, False)
if obj.name.endswith(".sch"):
sch = SchematicFrame(self.notebook, obj.path)
self.pages.append(sch)
self.notebook.AddPage(sch, obj.name, False)
def onMenuProjectNewBomSelection( self, event ):
pass
|
abdullah2891/remo | refs/heads/master | vendor-local/lib/python/rest_framework/templatetags/rest_framework.py | 13 | from __future__ import unicode_literals, absolute_import
from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.utils import six
from django.utils.encoding import iri_to_uri, force_text
from django.utils.html import escape
from django.utils.safestring import SafeData, mark_safe
from django.utils.html import smart_urlquote
from rest_framework.renderers import HTMLFormRenderer
from rest_framework.utils.urls import replace_query_param
import re
register = template.Library()
# Regex for adding classes to html snippets
class_re = re.compile(r'(?<=class=["\'])(.*)(?=["\'])')
@register.simple_tag
def get_pagination_html(pager):
return pager.to_html()
@register.simple_tag
def render_field(field, style=None):
style = style or {}
renderer = style.get('renderer', HTMLFormRenderer())
return renderer.render_field(field, style)
@register.simple_tag
def optional_login(request):
"""
Include a login snippet if REST framework's login view is in the URLconf.
"""
try:
login_url = reverse('rest_framework:login')
except NoReverseMatch:
return ''
snippet = "<li><a href='{href}?next={next}'>Log in</a></li>".format(href=login_url, next=escape(request.path))
return snippet
@register.simple_tag
def optional_logout(request, user):
"""
Include a logout snippet if REST framework's logout view is in the URLconf.
"""
try:
logout_url = reverse('rest_framework:logout')
except NoReverseMatch:
return '<li class="navbar-text">{user}</li>'.format(user=user)
snippet = """<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
{user}
<b class="caret"></b>
</a>
<ul class="dropdown-menu">
<li><a href='{href}?next={next}'>Log out</a></li>
</ul>
</li>"""
return snippet.format(user=user, href=logout_url, next=escape(request.path))
@register.simple_tag
def add_query_param(request, key, val):
"""
Add a query parameter to the current request url, and return the new url.
"""
iri = request.get_full_path()
uri = iri_to_uri(iri)
return escape(replace_query_param(uri, key, val))
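# E.g. (illustrative) for a request to '/api/items/?page=1', calling
# add_query_param(request, 'page', 2) yields '/api/items/?page=2'.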
@register.filter
def add_class(value, css_class):
"""
http://stackoverflow.com/questions/4124220/django-adding-css-classes-when-rendering-form-fields-in-a-template
Inserts classes into template variables that contain HTML tags,
useful for modifying forms without needing to change the Form objects.
Usage:
{{ field.label_tag|add_class:"control-label" }}
In the case of REST Framework, the filter is used to add Bootstrap-specific
classes to the forms.
"""
html = six.text_type(value)
match = class_re.search(html)
if match:
m = re.search(r'^%s$|^%s\s|\s%s\s|\s%s$' % (css_class, css_class,
css_class, css_class),
match.group(1))
if not m:
return mark_safe(class_re.sub(match.group(1) + " " + css_class,
html))
else:
return mark_safe(html.replace('>', ' class="%s">' % css_class, 1))
return value
# Bunch of stuff cloned from urlize
TRAILING_PUNCTUATION = ['.', ',', ':', ';', '.)', '"', "']", "'}", "'"]
WRAPPING_PUNCTUATION = [('(', ')'), ('<', '>'), ('[', ']'), ('<', '>'),
('"', '"'), ("'", "'")]
word_split_re = re.compile(r'(\s+)')
simple_url_re = re.compile(r'^https?://\[?\w', re.IGNORECASE)
simple_url_2_re = re.compile(r'^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)$', re.IGNORECASE)
simple_email_re = re.compile(r'^\S+@\S+\.\S+$')
def smart_urlquote_wrapper(matched_url):
"""
Simple wrapper for smart_urlquote. ValueError("Invalid IPv6 URL") can
be raised here, see issue #1386
"""
try:
return smart_urlquote(matched_url)
except ValueError:
return None
@register.filter
def urlize_quoted_links(text, trim_url_limit=None, nofollow=True, autoescape=True):
"""
Converts any URLs in text into clickable links.
Works on http://, https://, www. links, and also on links ending in one of
the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org).
Links can have trailing punctuation (periods, commas, close-parens) and
leading punctuation (opening parens) and it'll still do the right thing.
If trim_url_limit is not None, the URLs in link text longer than this limit
    will be truncated to trim_url_limit-3 characters and appended with an ellipsis.
If nofollow is True, the URLs in link text will get a rel="nofollow"
attribute.
If autoescape is True, the link text and URLs will get autoescaped.
"""
def trim_url(x, limit=trim_url_limit):
return limit is not None and (len(x) > limit and ('%s...' % x[:max(0, limit - 3)])) or x
safe_input = isinstance(text, SafeData)
words = word_split_re.split(force_text(text))
for i, word in enumerate(words):
if '.' in word or '@' in word or ':' in word:
# Deal with punctuation.
lead, middle, trail = '', word, ''
for punctuation in TRAILING_PUNCTUATION:
if middle.endswith(punctuation):
middle = middle[:-len(punctuation)]
trail = punctuation + trail
for opening, closing in WRAPPING_PUNCTUATION:
if middle.startswith(opening):
middle = middle[len(opening):]
lead = lead + opening
# Keep parentheses at the end only if they're balanced.
if (
middle.endswith(closing) and
middle.count(closing) == middle.count(opening) + 1
):
middle = middle[:-len(closing)]
trail = closing + trail
# Make URL we want to point to.
url = None
nofollow_attr = ' rel="nofollow"' if nofollow else ''
if simple_url_re.match(middle):
url = smart_urlquote_wrapper(middle)
elif simple_url_2_re.match(middle):
url = smart_urlquote_wrapper('http://%s' % middle)
elif ':' not in middle and simple_email_re.match(middle):
local, domain = middle.rsplit('@', 1)
try:
domain = domain.encode('idna').decode('ascii')
except UnicodeError:
continue
url = 'mailto:%s@%s' % (local, domain)
nofollow_attr = ''
# Make link.
if url:
trimmed = trim_url(middle)
if autoescape and not safe_input:
lead, trail = escape(lead), escape(trail)
url, trimmed = escape(url), escape(trimmed)
middle = '<a href="%s"%s>%s</a>' % (url, nofollow_attr, trimmed)
words[i] = mark_safe('%s%s%s' % (lead, middle, trail))
else:
if safe_input:
words[i] = mark_safe(word)
elif autoescape:
words[i] = escape(word)
elif safe_input:
words[i] = mark_safe(word)
elif autoescape:
words[i] = escape(word)
return ''.join(words)
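# Illustrative behaviour (output shape, not guaranteed byte-exact):
#   urlize_quoted_links('see www.example.com.')
#   # -> 'see <a href="http://www.example.com" rel="nofollow">www.example.com</a>.'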
@register.filter
def break_long_headers(header):
"""
Breaks headers longer than 160 characters (~page length)
when possible (are comma separated)
"""
if len(header) > 160 and ',' in header:
header = mark_safe('<br> ' + ', <br>'.join(header.split(',')))
return header
|
inovtec-solutions/OpenERP | refs/heads/branch_openerp | openerp/addons/base/module/wizard/base_module_scan.py | 24 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import glob
import imp
import re
import zipfile
from openerp import tools
from openerp.osv import osv
# NOTE (reconstruction, assumption): `module_name_re` is referenced below but
# was never defined in this file; this pattern extracts the module directory
# name from an addons-path glob such as '<addons>/<module>/__terp__.py'.
module_name_re = re.compile(r'.*[/\\](\w+)[/\\]__terp__\.py$')
class base_module_scan(osv.osv_memory):
""" scan module """
_name = "base.module.scan"
_description = "scan module"
def watch_dir(self, cr, uid, ids, context):
mod_obj = self.pool.get('ir.module.module')
all_mods = mod_obj.read(cr, uid, mod_obj.search(cr, uid, []), ['name', 'state'])
known_modules = [x['name'] for x in all_mods]
ls_ad = glob.glob(os.path.join(tools.config['addons_path'], '*', '__terp__.py'))
modules = [module_name_re.match(name).group(1) for name in ls_ad]
for fname in os.listdir(tools.config['addons_path']):
if zipfile.is_zipfile(fname):
modules.append( fname.split('.')[0])
for module in modules:
if module in known_modules:
continue
terp = mod_obj.get_module_info(module)
if not terp.get('installable', True):
continue
# XXX check if this code is correct...
fm = imp.find_module(module)
try:
imp.load_module(module, *fm)
finally:
if fm[0]:
fm[0].close()
values = mod_obj.get_values_from_terp(terp)
mod_id = mod_obj.create(cr, uid, dict(name=module, state='uninstalled', **values))
dependencies = terp.get('depends', [])
for d in dependencies:
cr.execute('insert into ir_module_module_dependency (module_id,name) values (%s, %s)', (mod_id, d))
for module in known_modules:
terp = mod_obj.get_module_info(module)
if terp.get('installable', True):
for mod in all_mods:
if mod['name'] == module and mod['state'] == 'uninstallable':
mod_obj.write(cr, uid, [mod['id']], {'state': 'uninstalled'})
return {}
base_module_scan()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
roadhead/satchmo | refs/heads/master | satchmo/contact/forms.py | 2 | from django import forms
from django.contrib.auth.models import User
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _, ugettext
from satchmo.configuration import config_value, config_get_group, SettingNotSet, SHOP_GROUP
from satchmo.contact.models import Contact, AddressBook, PhoneNumber, Organization
from satchmo.l10n.models import Country
from satchmo.shop.models import Config
import datetime
import logging
import signals
from django.forms.extras.widgets import SelectDateWidget
log = logging.getLogger('satchmo.contact.forms')
selection = ''
class ContactInfoForm(forms.Form):
email = forms.EmailField(max_length=75, label=_('Email'))
title = forms.CharField(max_length=30, label=_('Title'), required=False)
first_name = forms.CharField(max_length=30, label=_('First Name'))
last_name = forms.CharField(max_length=30, label=_('Last Name'))
phone = forms.CharField(max_length=30, label=_('Phone'))
addressee = forms.CharField(max_length=61, required=False, label=_('Addressee'))
company = forms.CharField(max_length=50, required=False, label=_('Company'))
street1 = forms.CharField(max_length=30, label=_('Street'))
street2 = forms.CharField(max_length=30, required=False)
city = forms.CharField(max_length=30, label=_('City'))
state = forms.CharField(max_length=30, required=False, label=_('State'))
postal_code = forms.CharField(max_length=10, label=_('ZIP code/Postcode'))
copy_address = forms.BooleanField(required=False, label=_('Shipping same as billing?'))
ship_addressee = forms.CharField(max_length=61, required=False, label=_('Addressee'))
ship_street1 = forms.CharField(max_length=30, required=False, label=_('Street'))
ship_street2 = forms.CharField(max_length=30, required=False)
ship_city = forms.CharField(max_length=30, required=False, label=_('City'))
ship_state = forms.CharField(max_length=30, required=False, label=_('State'))
ship_postal_code = forms.CharField(max_length=10, required=False, label=_('ZIP code/Postcode'))
next = forms.CharField(max_length=40, required=False, widget=forms.HiddenInput())
def __init__(self, *args, **kwargs):
if kwargs:
data = kwargs.copy()
else:
data = {}
shop = data.pop('shop', None)
contact = data.pop('contact', None)
self.shippable = data.pop('shippable', True)
if not shop:
shop = Config.objects.get_current()
super(ContactInfoForm, self).__init__(*args, **data)
self._billing_data_optional = config_value(SHOP_GROUP, 'BILLING_DATA_OPTIONAL')
self._local_only = shop.in_country_only
areas = shop.areas()
if shop.in_country_only and areas and areas.count()>0:
areas = [(area.abbrev or area.name, area.name) for area in areas]
billing_state = (contact and getattr(contact.billing_address, 'state', None)) or selection
shipping_state = (contact and getattr(contact.shipping_address, 'state', None)) or selection
if config_value('SHOP','ENFORCE_STATE'):
self.fields['state'] = forms.ChoiceField(choices=areas, initial=billing_state, label=_('State'))
self.fields['ship_state'] = forms.ChoiceField(choices=areas, initial=shipping_state, required=False, label=_('State'))
self._default_country = shop.sales_country
billing_country = (contact and getattr(contact.billing_address, 'country', None)) or self._default_country
shipping_country = (contact and getattr(contact.shipping_address, 'country', None)) or self._default_country
self.fields['country'] = forms.ModelChoiceField(shop.countries(), required=False, label=_('Country'), empty_label=None, initial=billing_country.pk)
self.fields['ship_country'] = forms.ModelChoiceField(shop.countries(), required=False, label=_('Country'), empty_label=None, initial=shipping_country.pk)
self.contact = contact
if self._billing_data_optional:
for fname in ('phone', 'street1', 'street2', 'city', 'state', 'country', 'postal_code', 'title'):
self.fields[fname].required = False
# slap a star on the required fields
for f in self.fields:
fld = self.fields[f]
if fld.required:
fld.label = (fld.label or f) + '*'
def _check_state(self, data, country):
if country and config_value('SHOP','ENFORCE_STATE') and country.adminarea_set.filter(active=True).count() > 0:
if not data or data == selection:
raise forms.ValidationError(
self._local_only and _('This field is required.') \
or _('State is required for your country.'))
if (country.adminarea_set
.filter(active=True)
.filter(Q(name=data)
|Q(abbrev=data)
|Q(name=data.capitalize())
|Q(abbrev=data.upper())).count() != 1):
raise forms.ValidationError(_('Invalid state or province.'))
def clean_email(self):
"""Prevent account hijacking by disallowing duplicate emails."""
email = self.cleaned_data.get('email', None)
if self.contact:
if self.contact.email and self.contact.email == email:
return email
users_with_email = Contact.objects.filter(email=email)
if len(users_with_email) == 0:
return email
if len(users_with_email) > 1 or users_with_email[0].id != self.contact.id:
raise forms.ValidationError(
ugettext("That email address is already in use."))
return email
def clean_postal_code(self):
postcode = self.cleaned_data.get('postal_code')
country = None
if self._local_only:
shop_config = Config.objects.get_current()
country = shop_config.sales_country
else:
country = self.fields['country'].clean(self.data.get('country'))
if not country:
# Either the store is misconfigured, or the country was
# not supplied, so the country validation will fail and
# we can defer the postcode validation until that's fixed.
return postcode
return self.validate_postcode_by_country(postcode, country)
def clean_state(self):
data = self.cleaned_data.get('state')
if self._local_only:
country = self._default_country
else:
country = self.fields['country'].clean(self.data.get('country'))
if country == None:
raise forms.ValidationError(_('This field is required.'))
self._check_state(data, country)
return data
def clean_addressee(self):
if not self.cleaned_data.get('addressee'):
first_and_last = u' '.join((self.cleaned_data.get('first_name', ''),
self.cleaned_data.get('last_name', '')))
return first_and_last
else:
return self.cleaned_data['addressee']
def clean_ship_addressee(self):
if not self.cleaned_data.get('ship_addressee') and \
not self.cleaned_data.get('copy_address'):
first_and_last = u' '.join((self.cleaned_data.get('first_name', ''),
self.cleaned_data.get('last_name', '')))
return first_and_last
else:
return self.cleaned_data['ship_addressee']
def clean_country(self):
if self._local_only:
return self._default_country
else:
if not self.cleaned_data.get('country'):
log.error("No country! Got '%s'" % self.cleaned_data.get('country'))
raise forms.ValidationError(_('This field is required.'))
return self.cleaned_data['country']
def clean_ship_country(self):
copy_address = self.fields['copy_address'].clean(self.data.get('copy_address'))
if copy_address:
return self.cleaned_data.get('country')
if self._local_only:
return self._default_country
        if not self.shippable:
            return self.cleaned_data.get('country')
shipcountry = self.cleaned_data.get('ship_country')
if not shipcountry:
raise forms.ValidationError(_('This field is required.'))
if config_value('PAYMENT', 'COUNTRY_MATCH'):
country = self.cleaned_data.get('country')
if shipcountry != country:
raise forms.ValidationError(_('Shipping and Billing countries must match'))
return shipcountry
def ship_charfield_clean(self, field_name):
if self.cleaned_data.get('copy_address'):
self.cleaned_data['ship_' + field_name] = self.fields[field_name].clean(self.data.get(field_name))
return self.cleaned_data['ship_' + field_name]
return self.fields['ship_' + field_name].clean(self.data.get('ship_' + field_name))
def clean_ship_street1(self):
return self.ship_charfield_clean('street1')
def clean_ship_street2(self):
if self.cleaned_data.get('copy_address'):
if 'street2' in self.cleaned_data:
self.cleaned_data['ship_street2'] = self.cleaned_data.get('street2')
return self.cleaned_data.get('ship_street2')
def clean_ship_city(self):
return self.ship_charfield_clean('city')
def clean_ship_postal_code(self):
code = self.ship_charfield_clean('postal_code')
country = None
if self._local_only:
shop_config = Config.objects.get_current()
country = shop_config.sales_country
else:
country = self.ship_charfield_clean('country')
if not country:
# Either the store is misconfigured, or the country was
# not supplied, so the country validation will fail and
# we can defer the postcode validation until that's fixed.
return code
return self.validate_postcode_by_country(code, country)
def clean_ship_state(self):
data = self.cleaned_data.get('ship_state')
if self.cleaned_data.get('copy_address'):
if 'state' in self.cleaned_data:
self.cleaned_data['ship_state'] = self.cleaned_data['state']
return self.cleaned_data['ship_state']
if self._local_only:
country = self._default_country
else:
country = self.ship_charfield_clean('country')
self._check_state(data, country)
return data
def save(self, contact=None, update_newsletter=True, **kwargs):
return self.save_info(contact=contact, update_newsletter=update_newsletter, **kwargs)
def save_info(self, contact=None, update_newsletter=True, **kwargs):
"""Save the contact info into the database.
Checks to see if contact exists. If not, creates a contact
and copies in the address and phone number."""
if not contact:
customer = Contact()
log.debug('creating new contact')
else:
customer = contact
log.debug('Saving contact info for %s', contact)
data = self.cleaned_data.copy()
country = data['country']
if not isinstance(country, Country):
country = Country.objects.get(pk=country)
data['country'] = country
data['country_id'] = country.id
shipcountry = data['ship_country']
if not isinstance(shipcountry, Country):
shipcountry = Country.objects.get(pk=shipcountry)
data['ship_country'] = shipcountry
data['ship_country_id'] = shipcountry.id
companyname = data.pop('company', None)
if companyname:
org = Organization.objects.by_name(companyname, create=True)
customer.organization = org
for field in customer.__dict__.keys():
try:
setattr(customer, field, data[field])
except KeyError:
pass
if update_newsletter and config_get_group('NEWSLETTER'):
from satchmo.newsletter import update_subscription
if 'newsletter' not in data:
subscribed = False
else:
subscribed = data['newsletter']
update_subscription(contact, subscribed)
if not customer.role:
customer.role = "Customer"
customer.save()
# we need to make sure we don't blindly add new addresses
# this isn't ideal, but until we have a way to manage addresses
# this will force just the two addresses, shipping and billing
# TODO: add address management like Amazon.
bill_address = customer.billing_address
if not bill_address:
bill_address = AddressBook(contact=customer)
changed_location = False
address_keys = bill_address.__dict__.keys()
for field in address_keys:
if (not changed_location) and field in ('state', 'country', 'city'):
if getattr(bill_address, field) != data[field]:
changed_location = True
try:
setattr(bill_address, field, data[field])
except KeyError:
pass
bill_address.is_default_billing = True
copy_address = data['copy_address']
ship_address = customer.shipping_address
if copy_address:
# make sure we don't have any other default shipping address
if ship_address and ship_address.id != bill_address.id:
ship_address.delete()
bill_address.is_default_shipping = True
bill_address.save()
if not copy_address:
if not ship_address or ship_address.id == bill_address.id:
ship_address = AddressBook()
for field in address_keys:
if (not changed_location) and field in ('state', 'country', 'city'):
if getattr(ship_address, field) != data[field]:
changed_location = True
try:
setattr(ship_address, field, data['ship_' + field])
except KeyError:
pass
ship_address.is_default_shipping = True
ship_address.is_default_billing = False
ship_address.contact = customer
ship_address.save()
if not customer.primary_phone:
phone = PhoneNumber()
phone.primary = True
else:
phone = customer.primary_phone
phone.phone = data['phone']
phone.contact = customer
phone.save()
signals.form_save.send(ContactInfoForm, object=customer, formdata=data, form=self)
if changed_location:
signals.satchmo_contact_location_changed.send(self, contact=customer)
return customer.id
def validate_postcode_by_country(self, postcode, country):
responses = signals.validate_postcode.send(self, postcode=postcode, country=country)
# allow responders to reformat the code, but if they don't return
# anything, then just use the existing code
for responder, response in responses:
if response:
return response
return postcode
class DateTextInput(forms.TextInput):
def render(self, name, value, attrs=None):
if isinstance(value, datetime.date):
value = value.strftime("%m.%d.%Y")
return super(DateTextInput, self).render(name, value, attrs)
class ExtendedContactInfoForm(ContactInfoForm):
"""Contact form which includes birthday and newsletter."""
years_to_display = range(datetime.datetime.now().year-100,datetime.datetime.now().year+1)
dob = forms.DateField(widget=SelectDateWidget(years=years_to_display), required=False)
newsletter = forms.BooleanField(label=_('Newsletter'), widget=forms.CheckboxInput(), required=False)
|
vany-egorov/node-gyp | refs/heads/master | gyp/tools/graphviz.py | 2679 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Using the JSON dumped by the dump-dependency-json generator,
generate input suitable for graphviz to render a dependency graph of
targets."""
import collections
import json
import sys
def ParseTarget(target):
target, _, suffix = target.partition('#')
filename, _, target = target.partition(':')
return filename, target, suffix
def LoadEdges(filename, targets):
  """Load the edges map from the dump file, and filter it to only
  show targets in |targets| and their dependents."""
  # Honor the filename argument instead of hardcoding 'dump.json'.
  file = open(filename)
  edges = json.load(file)
  file.close()
# Copy out only the edges we're interested in from the full edge list.
target_edges = {}
to_visit = targets[:]
while to_visit:
src = to_visit.pop()
if src in target_edges:
continue
target_edges[src] = edges[src]
to_visit.extend(edges[src])
return target_edges
def WriteGraph(edges):
"""Print a graphviz graph to stdout.
|edges| is a map of target to a list of other targets it depends on."""
# Bucket targets by file.
files = collections.defaultdict(list)
for src, dst in edges.items():
build_file, target_name, toolset = ParseTarget(src)
files[build_file].append(src)
print 'digraph D {'
print ' fontsize=8' # Used by subgraphs.
print ' node [fontsize=8]'
# Output nodes by file. We must first write out each node within
# its file grouping before writing out any edges that may refer
# to those nodes.
for filename, targets in files.items():
if len(targets) == 1:
# If there's only one node for this file, simplify
# the display by making it a box without an internal node.
target = targets[0]
build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
target_name)
else:
# Group multiple nodes together in a subgraph.
print ' subgraph "cluster_%s" {' % filename
print ' label = "%s"' % filename
for target in targets:
build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [label="%s"]' % (target, target_name)
print ' }'
# Now that we've placed all the nodes within subgraphs, output all
# the edges between nodes.
for src, dsts in edges.items():
for dst in dsts:
print ' "%s" -> "%s"' % (src, dst)
print '}'
def main():
if len(sys.argv) < 2:
print >>sys.stderr, __doc__
print >>sys.stderr
print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0])
return 1
edges = LoadEdges('dump.json', sys.argv[1:])
WriteGraph(edges)
return 0
if __name__ == '__main__':
sys.exit(main())
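# Illustrative invocation (assumes a dump.json produced by gyp's
# dump-dependency-json generator is already in the working directory):
#   $ python graphviz.py 'src/foo.gyp:my_target#host' | dot -Tpng -o deps.png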
|
ftomassetti/intellij-community | refs/heads/master | python/testData/resolve/multiFile/relativeSimple/sys.py | 83 | token = "local"
|
diagramsoftware/odoo | refs/heads/8.0 | addons/crm/crm_segmentation.py | 333 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv,orm
class crm_segmentation(osv.osv):
    '''
    A segmentation is a tool to automatically assign categories to partners.
    These assignments are based on criteria.
    '''
_name = "crm.segmentation"
_description = "Partner Segmentation"
_columns = {
'name': fields.char('Name', required=True, help='The name of the segmentation.'),
'description': fields.text('Description'),
'categ_id': fields.many2one('res.partner.category', 'Partner Category',\
required=True, help='The partner category that will be \
added to partners that match the segmentation criteria after computation.'),
        'exclusif': fields.boolean('Exclusive', help='Check if the category is limited to partners that match the segmentation criteria.\
\nIf checked, remove the category from partners that don\'t match segmentation criteria'),
'state': fields.selection([('not running','Not Running'),\
('running','Running')], 'Execution Status', readonly=True),
'partner_id': fields.integer('Max Partner ID processed'),
'segmentation_line': fields.one2many('crm.segmentation.line', \
'segmentation_id', 'Criteria', required=True, copy=True),
'sales_purchase_active': fields.boolean('Use The Sales Purchase Rules', help='Check if you want to use this tab as part of the segmentation rule. If not checked, the criteria beneath will be ignored')
}
_defaults = {
'partner_id': lambda *a: 0,
'state': lambda *a: 'not running',
}
def process_continue(self, cr, uid, ids, start=False):
""" @param self: The object pointer
@param cr: the current row, from the database cursor,
            @param uid: the current user's ID for security checks,
            @param ids: List of Process continue's IDs"""
partner_obj = self.pool.get('res.partner')
categs = self.read(cr, uid, ids, ['categ_id', 'exclusif', 'sales_purchase_active'])
for categ in categs:
if start:
if categ['exclusif']:
cr.execute('delete from res_partner_res_partner_category_rel \
where category_id=%s', (categ['categ_id'][0],))
partner_obj.invalidate_cache(cr, uid, ['category_id'])
id = categ['id']
cr.execute('select id from res_partner order by id ')
partners = [x[0] for x in cr.fetchall()]
if categ['sales_purchase_active']:
to_remove_list=[]
cr.execute('select id from crm_segmentation_line where segmentation_id=%s', (id,))
line_ids = [x[0] for x in cr.fetchall()]
for pid in partners:
if (not self.pool.get('crm.segmentation.line').test(cr, uid, line_ids, pid)):
to_remove_list.append(pid)
for pid in to_remove_list:
partners.remove(pid)
for partner in partner_obj.browse(cr, uid, partners):
category_ids = [categ_id.id for categ_id in partner.category_id]
if categ['categ_id'][0] not in category_ids:
cr.execute('insert into res_partner_res_partner_category_rel (category_id,partner_id) \
values (%s,%s)', (categ['categ_id'][0], partner.id))
partner_obj.invalidate_cache(cr, uid, ['category_id'], [partner.id])
self.write(cr, uid, [id], {'state':'not running', 'partner_id':0})
return True
def process_stop(self, cr, uid, ids, *args):
""" @param self: The object pointer
@param cr: the current row, from the database cursor,
            @param uid: the current user's ID for security checks,
            @param ids: List of Process stop's IDs"""
return self.write(cr, uid, ids, {'state':'not running', 'partner_id':0})
def process_start(self, cr, uid, ids, *args):
""" @param self: The object pointer
@param cr: the current row, from the database cursor,
            @param uid: the current user's ID for security checks,
            @param ids: List of Process start's IDs """
self.write(cr, uid, ids, {'state':'running', 'partner_id':0})
return self.process_continue(cr, uid, ids, start=True)
class crm_segmentation_line(osv.osv):
""" Segmentation line """
_name = "crm.segmentation.line"
_description = "Segmentation line"
_columns = {
'name': fields.char('Rule Name', required=True),
'segmentation_id': fields.many2one('crm.segmentation', 'Segmentation'),
'expr_name': fields.selection([('sale','Sale Amount'),
('purchase','Purchase Amount')], 'Control Variable', required=True),
'expr_operator': fields.selection([('<','<'),('=','='),('>','>')], 'Operator', required=True),
'expr_value': fields.float('Value', required=True),
'operator': fields.selection([('and','Mandatory Expression'),\
('or','Optional Expression')],'Mandatory / Optional', required=True),
}
_defaults = {
'expr_name': lambda *a: 'sale',
'expr_operator': lambda *a: '>',
'operator': lambda *a: 'and'
}
def test(self, cr, uid, ids, partner_id):
""" @param self: The object pointer
@param cr: the current row, from the database cursor,
            @param uid: the current user's ID for security checks,
            @param ids: List of Test's IDs """
expression = {'<': lambda x,y: x<y, '=':lambda x,y:x==y, '>':lambda x,y:x>y}
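        # e.g. expression['>'](10.0, 5.0) evaluates to True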
ok = False
lst = self.read(cr, uid, ids)
for l in lst:
cr.execute('select * from ir_module_module where name=%s and state=%s', ('account','installed'))
if cr.fetchone():
if l['expr_name']=='sale':
cr.execute('SELECT SUM(l.price_unit * l.quantity) ' \
'FROM account_invoice_line l, account_invoice i ' \
'WHERE (l.invoice_id = i.id) ' \
'AND i.partner_id = %s '\
'AND i.type = \'out_invoice\'',
(partner_id,))
value = cr.fetchone()[0] or 0.0
cr.execute('SELECT SUM(l.price_unit * l.quantity) ' \
'FROM account_invoice_line l, account_invoice i ' \
'WHERE (l.invoice_id = i.id) ' \
'AND i.partner_id = %s '\
'AND i.type = \'out_refund\'',
(partner_id,))
value -= cr.fetchone()[0] or 0.0
elif l['expr_name']=='purchase':
cr.execute('SELECT SUM(l.price_unit * l.quantity) ' \
'FROM account_invoice_line l, account_invoice i ' \
'WHERE (l.invoice_id = i.id) ' \
'AND i.partner_id = %s '\
'AND i.type = \'in_invoice\'',
(partner_id,))
value = cr.fetchone()[0] or 0.0
cr.execute('SELECT SUM(l.price_unit * l.quantity) ' \
'FROM account_invoice_line l, account_invoice i ' \
'WHERE (l.invoice_id = i.id) ' \
'AND i.partner_id = %s '\
'AND i.type = \'in_refund\'',
(partner_id,))
value -= cr.fetchone()[0] or 0.0
res = expression[l['expr_operator']](value, l['expr_value'])
if (not res) and (l['operator']=='and'):
return False
if res:
return True
return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
slipperyhank/pyphi | refs/heads/develop | pyphi/cache.py | 1 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# cache.py
"""
A memory-limited cache decorator.
"""
import os
import pickle
from functools import namedtuple, update_wrapper, wraps
import psutil
import redis
from . import config, constants
_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "currsize"])
def memory_full():
"""Check if the memory is too full for further caching."""
current_process = psutil.Process(os.getpid())
return (current_process.memory_percent() >
config.MAXIMUM_CACHE_MEMORY_PERCENTAGE)
class _HashedSeq(list):
"""This class guarantees that hash() will be called no more than once
per element. This is important because the lru_cache() will hash
the key multiple times on a cache miss.
"""
__slots__ = 'hashvalue'
def __init__(self, tup, hash=hash):
self[:] = tup
self.hashvalue = hash(tup)
def __hash__(self):
return self.hashvalue
def _make_key(args, kwds, typed,
kwd_mark=(object(),),
fasttypes={int, str, frozenset, type(None)},
sorted=sorted, tuple=tuple, type=type, len=len):
"""Make a cache key from optionally typed positional and keyword arguments.
The key is constructed in a way that is flat as possible rather than as a
nested structure that would take more memory.
If there is only a single argument and its data type is known to cache its
hash value, then that argument is returned without a wrapper. This saves
space and improves lookup speed.
"""
key = args
if kwds:
sorted_items = sorted(kwds.items())
key += kwd_mark
for item in sorted_items:
key += item
if typed:
key += tuple(type(v) for v in args)
if kwds:
key += tuple(type(v) for k, v in sorted_items)
elif len(key) == 1 and type(key[0]) in fasttypes:
return key[0]
return _HashedSeq(key)
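# Illustrative key shapes: _make_key((1, 2), {}, False) returns a _HashedSeq
# wrapping (1, 2), while a lone fast-type argument such as
# _make_key(('a',), {}, False) comes back as the bare string 'a'; keyword
# items are appended in sorted order after the kwd_mark sentinel.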
def cache(cache={}, maxmem=config.MAXIMUM_CACHE_MEMORY_PERCENTAGE,
typed=False):
"""Memory-limited cache decorator.
``maxmem`` is a float between 0 and 100, inclusive, specifying the maximum
percentage of physical memory that the cache can use.
If ``typed`` is ``True``, arguments of different types will be cached
separately. For example, f(3.0) and f(3) will be treated as distinct calls
with distinct results.
Arguments to the cached function must be hashable.
View the cache statistics named tuple (hits, misses, currsize)
with f.cache_info(). Clear the cache and statistics with f.cache_clear().
Access the underlying function with f.__wrapped__.
"""
# Constants shared by all lru cache instances:
# Unique object used to signal cache misses.
sentinel = object()
# Build a key from the function arguments.
make_key = _make_key
def decorating_function(user_function, hits=0, misses=0):
full = False
# Bound method to look up a key or return None.
cache_get = cache.get
if not maxmem:
def wrapper(*args, **kwds):
# Simple caching without memory limit.
nonlocal hits, misses
key = make_key(args, kwds, typed)
result = cache_get(key, sentinel)
if result is not sentinel:
hits += 1
return result
result = user_function(*args, **kwds)
cache[key] = result
misses += 1
return result
else:
def wrapper(*args, **kwds):
# Memory-limited caching.
nonlocal hits, misses, full
key = make_key(args, kwds, typed)
result = cache_get(key)
if result is not None:
hits += 1
return result
result = user_function(*args, **kwds)
if not full:
cache[key] = result
# Cache is full if the total recursive usage is greater
# than the maximum allowed percentage.
current_process = psutil.Process(os.getpid())
full = current_process.memory_percent() > maxmem
misses += 1
return result
def cache_info():
"""Report cache statistics."""
return _CacheInfo(hits, misses, len(cache))
def cache_clear():
"""Clear the cache and cache statistics."""
nonlocal hits, misses, full
cache.clear()
hits = misses = 0
full = False
wrapper.cache_info = cache_info
wrapper.cache_clear = cache_clear
return update_wrapper(wrapper, user_function)
return decorating_function
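# Minimal usage sketch (illustrative; pass a fresh dict so call sites do not
# share the mutable default, and maxmem=None to take the unbounded branch):
#
#     @cache(cache={}, maxmem=None)
#     def fib(n):
#         return n if n < 2 else fib(n - 1) + fib(n - 2)
#
#     fib(30)
#     fib.cache_info()  # CacheInfo(hits=28, misses=31, currsize=31)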
class DictCache():
"""A generic dictionary-based cache.
Intended to be used as an object-level cache of method results.
"""
def __init__(self):
self.cache = {}
self.hits = 0
self.misses = 0
def clear(self):
self.cache = {}
self.hits = 0
self.misses = 0
def size(self):
"""Number of items in cache"""
return len(self.cache)
def info(self):
"""Return info about cache hits, misses, and size"""
return _CacheInfo(self.hits, self.misses, self.size())
def get(self, key):
"""Get a value out of the cache.
Returns None if the key is not in the cache. Updates cache
statistics.
"""
if key in self.cache:
self.hits += 1
return self.cache[key]
self.misses += 1
return None
def set(self, key, value):
"""Set a value in the cache"""
self.cache[key] = value
# TODO: handle **kwarg keys if needed
# See joblib.func_inspect.filter_args
def key(self, *args, _prefix=None, **kwargs):
"""Get the cache key for the given function args.
Kwargs:
prefix: A constant to prefix to the key.
"""
if kwargs:
raise NotImplementedError(
'kwarg cache keys not implemented')
return (_prefix,) + tuple(args)
# TODO: confirm that a global connection/pool makes sense, esp for multiprocessing
# TODO: maybe just expose the connection `if REDIS_CACHE`, instead of with this
# singleton business
class RedisConn:
"""Singleton redis connection object.
Expose the StrictRedis api, but only maintain one connection pool.
Raises:
redis.exceptions.ConnectionError: If the Redis server is not available.
"""
instance = None
def __init__(self):
if RedisConn.instance is None:
conn = redis.StrictRedis(host=config.REDIS_CONFIG['host'],
port=config.REDIS_CONFIG['port'],
db=0)
# TODO: we probably don't want to flush all, huh?
# Will we ever have stale/incorrect results in the cache?
conn.flushall()
RedisConn.instance = conn
def __getattr__(self, name):
"""Delegate lookup to ``StrictRedis``"""
return getattr(self.instance, name)
# TODO: use a cache prefix?
# TODO: key schema for easy access/queries
class RedisCache():
def clear(self):
raise NotImplementedError
def size(self):
"""Size of the Redis cache.
.. note:: This is the size of the entire Redis database.
"""
return RedisConn().dbsize()
def info(self):
"""Return cache information.
.. note:: This is not the cache info for the entire Redis key space.
"""
info = RedisConn().info()
return _CacheInfo(info['keyspace_hits'],
info['keyspace_misses'],
self.size())
def get(self, key):
"""Get a value from the cache.
Returns None if the key is not in the cache.
"""
value = RedisConn().get(key)
if value is not None:
value = pickle.loads(value)
return value
def set(self, key, value):
"""Set a value in the cache."""
value = pickle.dumps(value, protocol=constants.PICKLE_PROTOCOL)
RedisConn().set(key, value)
def key(self):
"""Delegate to subclasses."""
raise NotImplementedError
def validate_parent_cache(parent_cache):
# TODO: also validate that subsystem is a
# cut version of parent_cache.subsystem?
# Do we need to check this at all?
if parent_cache.subsystem.is_cut:
raise ValueError("parent_cache must be from an uncut subsystem")
class RedisMiceCache(RedisCache):
"""A Redis-backed cache for `Subsystem.find_mice`.
See :func:`MiceCache` for more info.
"""
def __init__(self, subsystem, parent_cache=None):
super().__init__()
self.subsystem = subsystem
self.subsystem_hash = hash(subsystem)
if parent_cache is not None:
validate_parent_cache(parent_cache)
# Store the hash of the parent subsystem. We don't want to store the
# parent subsystem explicitly so that it does not need to be passed
# between processes.
self.parent_subsystem_hash = parent_cache.subsystem_hash
else:
self.parent_subsystem_hash = None
# TODO: if the value is found in the parent cache, store it in this
# cache so we don't have to call `damaged_by_cut` over and over?
def get(self, key):
"""Get a value from the cache.
If the Mice cannot be found in this cache, try and find it in the
parent cache.
"""
mice = super().get(key)
if mice is not None: # Hit
return mice
# Try and get the key from the parent cache.
if self.parent_subsystem_hash:
parent_key = key.replace(str(self.subsystem_hash),
str(self.parent_subsystem_hash), 1)
mice = super().get(parent_key)
if mice is not None and not mice.damaged_by_cut(self.subsystem):
return mice
return None
def set(self, key, value):
"""Only need to set if the subsystem is uncut.
Caches are only inherited from uncut subsystems.
"""
if not self.subsystem.is_cut:
super().set(key, value)
def key(self, direction, mechanism, purviews=False, _prefix=None):
"""Cache key. This is the call signature of |find_mice|"""
return "subsys:{}:{}:{}:{}:{}".format(
self.subsystem_hash, _prefix, direction, mechanism, purviews)
class DictMiceCache(DictCache):
"""A subsystem-local cache for |Mice| objects.
See :func:`MiceCache` for more info.
"""
def __init__(self, subsystem, parent_cache=None):
super().__init__()
self.subsystem = subsystem
if parent_cache is not None:
validate_parent_cache(parent_cache)
self._build(parent_cache)
def _build(self, parent_cache):
"""Build the initial cache from the parent.
Only include the Mice which are unaffected by the subsystem cut.
        A Mice is affected if the cut either splits the mechanism
        or splits the connections between the purview and mechanism.
        """
for key, mice in parent_cache.cache.items():
if not mice.damaged_by_cut(self.subsystem):
self.cache[key] = mice
def set(self, key, mice):
"""Set a value in the cache.
Only cache if:
- The subsystem is uncut (caches are only inherited from
uncut subsystems so there is no reason to cache on cut
subsystems.)
- |phi| > 0. Ideally we would cache all mice, but the size
of the cache grows way too large, making parallel computations
incredibly inefficient because the caches have to be passed
            between processes. This will be changed once global caches are
implemented.
- Memory is not too full.
"""
if (not self.subsystem.is_cut and mice.phi > 0
and not memory_full()):
self.cache[key] = mice
def key(self, direction, mechanism, purviews=False, _prefix=None):
"""Cache key. This is the call signature of |find_mice|"""
return (_prefix, direction, mechanism, purviews)
def MiceCache(subsystem, parent_cache=None):
"""Construct a Mice cache.
Uses either a Redis-backed cache or a local dict cache on the object.
Args:
subsystem (Subsystem): The subsystem that this is a cache for.
Kwargs:
parent_cache (MiceCache): The cache generated by the uncut
version of ``subsystem``. Any cached |Mice| which are
unaffected by the cut are reused in this cache. If None,
the cache is initialized empty.
"""
if config.REDIS_CACHE:
cls = RedisMiceCache
else:
cls = DictMiceCache
return cls(subsystem, parent_cache=parent_cache)
class PurviewCache(DictCache):
"""A network-level cache for possible purviews."""
def set(self, key, value):
"""Only set if purview caching is enabled"""
if config.CACHE_POTENTIAL_PURVIEWS:
self.cache[key] = value
def method(cache_name, key_prefix=None):
"""Caching decorator for object-level method caches.
Cache key generation is delegated to the cache.
Args:
cache_name (str): The name of the (already-instantiated) cache
on the decorated object which should be used to store results
of this method.
*key_prefix: A constant to use as part of the cache key in addition
to the method arguments.
"""
def decorator(func):
@wraps(func)
def wrapper(obj, *args, **kwargs):
cache = getattr(obj, cache_name)
# Delegate key generation
key = cache.key(*args, _prefix=key_prefix, **kwargs)
# Get cached value, or compute
value = cache.get(key)
if value is None: # miss
value = func(obj, *args, **kwargs)
cache.set(key, value)
return value
return wrapper
return decorator
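# Illustrative sketch (hypothetical class, not part of this module's API):
#
#     class Squares:
#         def __init__(self):
#             self._cache = DictCache()
#
#         @method('_cache', 'square')
#         def square(self, x):
#             return x * x
#
#     s = Squares()
#     s.square(3); s.square(3)
#     s._cache.info()  # CacheInfo(hits=1, misses=1, currsize=1)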
|
Galaxy-J5/android_kernel_samsung_j5nlte | refs/heads/cm-12.1 | tools/perf/tests/attr.py | 3174 | #! /usr/bin/python
import os
import sys
import glob
import optparse
import tempfile
import logging
import shutil
import ConfigParser
class Fail(Exception):
def __init__(self, test, msg):
self.msg = msg
self.test = test
def getMsg(self):
return '\'%s\' - %s' % (self.test.path, self.msg)
class Unsup(Exception):
def __init__(self, test):
self.test = test
def getMsg(self):
return '\'%s\'' % self.test.path
class Event(dict):
terms = [
'cpu',
'flags',
'type',
'size',
'config',
'sample_period',
'sample_type',
'read_format',
'disabled',
'inherit',
'pinned',
'exclusive',
'exclude_user',
'exclude_kernel',
'exclude_hv',
'exclude_idle',
'mmap',
'comm',
'freq',
'inherit_stat',
'enable_on_exec',
'task',
'watermark',
'precise_ip',
'mmap_data',
'sample_id_all',
'exclude_host',
'exclude_guest',
'exclude_callchain_kernel',
'exclude_callchain_user',
'wakeup_events',
'bp_type',
'config1',
'config2',
'branch_sample_type',
'sample_regs_user',
'sample_stack_user',
]
def add(self, data):
for key, val in data:
log.debug(" %s = %s" % (key, val))
self[key] = val
def __init__(self, name, data, base):
log.debug(" Event %s" % name);
self.name = name;
self.group = ''
self.add(base)
self.add(data)
def compare_data(self, a, b):
# Allow multiple values in assignment separated by '|'
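        # e.g. compare_data('0|1', '1') -> True; '*' on either side matches anything.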
a_list = a.split('|')
b_list = b.split('|')
for a_item in a_list:
for b_item in b_list:
if (a_item == b_item):
return True
elif (a_item == '*') or (b_item == '*'):
return True
return False
def equal(self, other):
for t in Event.terms:
log.debug(" [%s] %s %s" % (t, self[t], other[t]));
if not self.has_key(t) or not other.has_key(t):
return False
if not self.compare_data(self[t], other[t]):
return False
return True
def diff(self, other):
for t in Event.terms:
if not self.has_key(t) or not other.has_key(t):
continue
if not self.compare_data(self[t], other[t]):
log.warning("expected %s=%s, got %s" % (t, self[t], other[t]))
# Test file description needs to have following sections:
# [config]
# - just single instance in file
# - needs to specify:
# 'command' - perf command name
# 'args' - special command arguments
# 'ret' - expected command return value (0 by default)
#
# [eventX:base]
# - one or multiple instances in file
# - expected values assignments
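#
# Example (illustrative, abbreviated) test description:
#
#   [config]
#   command = record
#   args    = kill >/dev/null 2>&1
#   ret     = 1
#
#   [event:base-record]
#   sample_period = 4000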
class Test(object):
def __init__(self, path, options):
parser = ConfigParser.SafeConfigParser()
parser.read(path)
log.warning("running '%s'" % path)
self.path = path
self.test_dir = options.test_dir
self.perf = options.perf
self.command = parser.get('config', 'command')
self.args = parser.get('config', 'args')
try:
self.ret = parser.get('config', 'ret')
except:
self.ret = 0
self.expect = {}
self.result = {}
log.debug(" loading expected events");
self.load_events(path, self.expect)
def is_event(self, name):
if name.find("event") == -1:
return False
else:
return True
def load_events(self, path, events):
parser_event = ConfigParser.SafeConfigParser()
parser_event.read(path)
        # The event record section header contains the 'event' word,
        # optionally followed by ':' allowing to load the 'parent
        # event' first as a base
for section in filter(self.is_event, parser_event.sections()):
parser_items = parser_event.items(section);
base_items = {}
# Read parent event if there's any
if (':' in section):
base = section[section.index(':') + 1:]
parser_base = ConfigParser.SafeConfigParser()
parser_base.read(self.test_dir + '/' + base)
base_items = parser_base.items('event')
e = Event(section, parser_items, base_items)
events[section] = e
def run_cmd(self, tempdir):
cmd = "PERF_TEST_ATTR=%s %s %s -o %s/perf.data %s" % (tempdir,
self.perf, self.command, tempdir, self.args)
ret = os.WEXITSTATUS(os.system(cmd))
log.info(" '%s' ret %d " % (cmd, ret))
if ret != int(self.ret):
raise Unsup(self)
def compare(self, expect, result):
match = {}
log.debug(" compare");
# For each expected event find all matching
# events in result. Fail if there's not any.
for exp_name, exp_event in expect.items():
exp_list = []
log.debug(" matching [%s]" % exp_name)
for res_name, res_event in result.items():
log.debug(" to [%s]" % res_name)
if (exp_event.equal(res_event)):
exp_list.append(res_name)
log.debug(" ->OK")
else:
log.debug(" ->FAIL");
log.debug(" match: [%s] matches %s" % (exp_name, str(exp_list)))
            # we did not find any matching event - fail
if (not exp_list):
exp_event.diff(res_event)
raise Fail(self, 'match failure');
match[exp_name] = exp_list
# For each defined group in the expected events
# check we match the same group in the result.
for exp_name, exp_event in expect.items():
group = exp_event.group
if (group == ''):
continue
for res_name in match[exp_name]:
res_group = result[res_name].group
if res_group not in match[group]:
raise Fail(self, 'group failure')
log.debug(" group: [%s] matches group leader %s" %
(exp_name, str(match[group])))
log.debug(" matched")
def resolve_groups(self, events):
for name, event in events.items():
group_fd = event['group_fd'];
if group_fd == '-1':
continue;
for iname, ievent in events.items():
if (ievent['fd'] == group_fd):
event.group = iname
log.debug('[%s] has group leader [%s]' % (name, iname))
break;
def run(self):
tempdir = tempfile.mkdtemp();
try:
# run the test script
self.run_cmd(tempdir);
# load events expectation for the test
log.debug(" loading result events");
for f in glob.glob(tempdir + '/event*'):
self.load_events(f, self.result);
# resolve group_fd to event names
self.resolve_groups(self.expect);
self.resolve_groups(self.result);
# do the expectation - results matching - both ways
self.compare(self.expect, self.result)
self.compare(self.result, self.expect)
finally:
# cleanup
shutil.rmtree(tempdir)
def run_tests(options):
for f in glob.glob(options.test_dir + '/' + options.test):
try:
Test(f, options).run()
except Unsup, obj:
log.warning("unsupp %s" % obj.getMsg())
def setup_log(verbose):
global log
level = logging.CRITICAL
if verbose == 1:
level = logging.WARNING
if verbose == 2:
level = logging.INFO
if verbose >= 3:
level = logging.DEBUG
log = logging.getLogger('test')
log.setLevel(level)
ch = logging.StreamHandler()
ch.setLevel(level)
formatter = logging.Formatter('%(message)s')
ch.setFormatter(formatter)
log.addHandler(ch)
USAGE = '''%s [OPTIONS]
-d dir # tests dir
-p path # perf binary
-t test # single test
-v # verbose level
''' % sys.argv[0]
def main():
parser = optparse.OptionParser(usage=USAGE)
parser.add_option("-t", "--test",
action="store", type="string", dest="test")
parser.add_option("-d", "--test-dir",
action="store", type="string", dest="test_dir")
parser.add_option("-p", "--perf",
action="store", type="string", dest="perf")
parser.add_option("-v", "--verbose",
action="count", dest="verbose")
options, args = parser.parse_args()
if args:
parser.error('FAILED wrong arguments %s' % ' '.join(args))
return -1
setup_log(options.verbose)
if not options.test_dir:
print 'FAILED no -d option specified'
sys.exit(-1)
if not options.test:
options.test = 'test*'
try:
run_tests(options)
except Fail, obj:
print "FAILED %s" % obj.getMsg();
sys.exit(-1)
sys.exit(0)
if __name__ == '__main__':
main()
|
sunqm/pyscf | refs/heads/master | pyscf/data/gyro.py | 2 | #!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyscf.data.nucprop import g_factor_to_gyromagnetic_ratio
from pyscf.data.nucprop import get_nuc_g_factor
from pyscf.data.nucprop import ISOTOPE_GYRO
|
MungoRae/home-assistant | refs/heads/dev | homeassistant/components/cover/mqtt.py | 4 | """
Support for MQTT cover devices.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/cover.mqtt/
"""
import asyncio
import logging
import voluptuous as vol
from homeassistant.core import callback
import homeassistant.components.mqtt as mqtt
from homeassistant.components.cover import (
CoverDevice, ATTR_TILT_POSITION, SUPPORT_OPEN_TILT,
SUPPORT_CLOSE_TILT, SUPPORT_STOP_TILT, SUPPORT_SET_TILT_POSITION,
SUPPORT_OPEN, SUPPORT_CLOSE, SUPPORT_STOP, SUPPORT_SET_POSITION,
ATTR_POSITION)
from homeassistant.exceptions import TemplateError
from homeassistant.const import (
CONF_NAME, CONF_VALUE_TEMPLATE, CONF_OPTIMISTIC, STATE_OPEN,
STATE_CLOSED, STATE_UNKNOWN)
from homeassistant.components.mqtt import (
CONF_STATE_TOPIC, CONF_COMMAND_TOPIC, CONF_QOS, CONF_RETAIN,
valid_publish_topic, valid_subscribe_topic)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['mqtt']
CONF_TILT_COMMAND_TOPIC = 'tilt_command_topic'
CONF_TILT_STATUS_TOPIC = 'tilt_status_topic'
CONF_POSITION_TOPIC = 'set_position_topic'
CONF_SET_POSITION_TEMPLATE = 'set_position_template'
CONF_PAYLOAD_OPEN = 'payload_open'
CONF_PAYLOAD_CLOSE = 'payload_close'
CONF_PAYLOAD_STOP = 'payload_stop'
CONF_STATE_OPEN = 'state_open'
CONF_STATE_CLOSED = 'state_closed'
CONF_TILT_CLOSED_POSITION = 'tilt_closed_value'
CONF_TILT_OPEN_POSITION = 'tilt_opened_value'
CONF_TILT_MIN = 'tilt_min'
CONF_TILT_MAX = 'tilt_max'
CONF_TILT_STATE_OPTIMISTIC = 'tilt_optimistic'
CONF_TILT_INVERT_STATE = 'tilt_invert_state'
DEFAULT_NAME = 'MQTT Cover'
DEFAULT_PAYLOAD_OPEN = 'OPEN'
DEFAULT_PAYLOAD_CLOSE = 'CLOSE'
DEFAULT_PAYLOAD_STOP = 'STOP'
DEFAULT_OPTIMISTIC = False
DEFAULT_RETAIN = False
DEFAULT_TILT_CLOSED_POSITION = 0
DEFAULT_TILT_OPEN_POSITION = 100
DEFAULT_TILT_MIN = 0
DEFAULT_TILT_MAX = 100
DEFAULT_TILT_OPTIMISTIC = False
DEFAULT_TILT_INVERT_STATE = False
OPEN_CLOSE_FEATURES = (SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP)
TILT_FEATURES = (SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT | SUPPORT_STOP_TILT |
SUPPORT_SET_TILT_POSITION)
PLATFORM_SCHEMA = mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend({
vol.Optional(CONF_COMMAND_TOPIC, default=None): valid_publish_topic,
vol.Optional(CONF_POSITION_TOPIC, default=None): valid_publish_topic,
vol.Optional(CONF_SET_POSITION_TEMPLATE, default=None): cv.template,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PAYLOAD_OPEN, default=DEFAULT_PAYLOAD_OPEN): cv.string,
vol.Optional(CONF_PAYLOAD_CLOSE, default=DEFAULT_PAYLOAD_CLOSE): cv.string,
vol.Optional(CONF_PAYLOAD_STOP, default=DEFAULT_PAYLOAD_STOP): cv.string,
vol.Optional(CONF_STATE_OPEN, default=STATE_OPEN): cv.string,
vol.Optional(CONF_STATE_CLOSED, default=STATE_CLOSED): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_TILT_COMMAND_TOPIC, default=None): valid_publish_topic,
vol.Optional(CONF_TILT_STATUS_TOPIC, default=None): valid_subscribe_topic,
vol.Optional(CONF_TILT_CLOSED_POSITION,
default=DEFAULT_TILT_CLOSED_POSITION): int,
vol.Optional(CONF_TILT_OPEN_POSITION,
default=DEFAULT_TILT_OPEN_POSITION): int,
vol.Optional(CONF_TILT_MIN, default=DEFAULT_TILT_MIN): int,
vol.Optional(CONF_TILT_MAX, default=DEFAULT_TILT_MAX): int,
vol.Optional(CONF_TILT_STATE_OPTIMISTIC,
default=DEFAULT_TILT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_TILT_INVERT_STATE,
default=DEFAULT_TILT_INVERT_STATE): cv.boolean,
})
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
"""Set up the MQTT Cover."""
value_template = config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = hass
set_position_template = config.get(CONF_SET_POSITION_TEMPLATE)
if set_position_template is not None:
set_position_template.hass = hass
async_add_devices([MqttCover(
config.get(CONF_NAME),
config.get(CONF_STATE_TOPIC),
config.get(CONF_COMMAND_TOPIC),
config.get(CONF_TILT_COMMAND_TOPIC),
config.get(CONF_TILT_STATUS_TOPIC),
config.get(CONF_QOS),
config.get(CONF_RETAIN),
config.get(CONF_STATE_OPEN),
config.get(CONF_STATE_CLOSED),
config.get(CONF_PAYLOAD_OPEN),
config.get(CONF_PAYLOAD_CLOSE),
config.get(CONF_PAYLOAD_STOP),
config.get(CONF_OPTIMISTIC),
value_template,
config.get(CONF_TILT_OPEN_POSITION),
config.get(CONF_TILT_CLOSED_POSITION),
config.get(CONF_TILT_MIN),
config.get(CONF_TILT_MAX),
config.get(CONF_TILT_STATE_OPTIMISTIC),
config.get(CONF_TILT_INVERT_STATE),
config.get(CONF_POSITION_TOPIC),
set_position_template,
)])
class MqttCover(CoverDevice):
"""Representation of a cover that can be controlled using MQTT."""
def __init__(self, name, state_topic, command_topic, tilt_command_topic,
tilt_status_topic, qos, retain, state_open, state_closed,
payload_open, payload_close, payload_stop,
optimistic, value_template, tilt_open_position,
tilt_closed_position, tilt_min, tilt_max, tilt_optimistic,
tilt_invert, position_topic, set_position_template):
"""Initialize the cover."""
self._position = None
self._state = None
self._name = name
self._state_topic = state_topic
self._command_topic = command_topic
self._tilt_command_topic = tilt_command_topic
self._tilt_status_topic = tilt_status_topic
self._qos = qos
self._payload_open = payload_open
self._payload_close = payload_close
self._payload_stop = payload_stop
self._state_open = state_open
self._state_closed = state_closed
self._retain = retain
self._tilt_open_position = tilt_open_position
self._tilt_closed_position = tilt_closed_position
self._optimistic = optimistic or state_topic is None
self._template = value_template
self._tilt_value = None
self._tilt_min = tilt_min
self._tilt_max = tilt_max
self._tilt_optimistic = tilt_optimistic
self._tilt_invert = tilt_invert
self._position_topic = position_topic
self._set_position_template = set_position_template
@asyncio.coroutine
def async_added_to_hass(self):
"""Subscribe MQTT events.
This method is a coroutine.
"""
@callback
def tilt_updated(topic, payload, qos):
"""Handle tilt updates."""
if (payload.isnumeric() and
self._tilt_min <= int(payload) <= self._tilt_max):
level = self.find_percentage_in_range(float(payload))
self._tilt_value = level
self.hass.async_add_job(self.async_update_ha_state())
@callback
def message_received(topic, payload, qos):
"""Handle new MQTT message."""
if self._template is not None:
payload = self._template.async_render_with_possible_json_value(
payload)
if payload == self._state_open:
self._state = False
elif payload == self._state_closed:
self._state = True
elif payload.isnumeric() and 0 <= int(payload) <= 100:
if int(payload) > 0:
self._state = False
else:
self._state = True
self._position = int(payload)
else:
_LOGGER.warning(
"Payload is not True, False, or integer (0-100): %s",
payload)
return
self.hass.async_add_job(self.async_update_ha_state())
if self._state_topic is None:
# Force into optimistic mode.
self._optimistic = True
else:
yield from mqtt.async_subscribe(
self.hass, self._state_topic, message_received, self._qos)
if self._tilt_status_topic is None:
self._tilt_optimistic = True
else:
self._tilt_optimistic = False
self._tilt_value = STATE_UNKNOWN
yield from mqtt.async_subscribe(
self.hass, self._tilt_status_topic, tilt_updated, self._qos)
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the cover."""
return self._name
@property
def is_closed(self):
"""Return if the cover is closed."""
return self._state
@property
def current_cover_position(self):
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
return self._position
@property
def current_cover_tilt_position(self):
"""Return current position of cover tilt."""
return self._tilt_value
@property
def supported_features(self):
"""Flag supported features."""
supported_features = 0
if self._command_topic is not None:
supported_features = OPEN_CLOSE_FEATURES
if self._position_topic is not None:
supported_features |= SUPPORT_SET_POSITION
if self._tilt_command_topic is not None:
supported_features |= TILT_FEATURES
return supported_features
@asyncio.coroutine
def async_open_cover(self, **kwargs):
"""Move the cover up.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass, self._command_topic, self._payload_open, self._qos,
self._retain)
if self._optimistic:
# Optimistically assume that cover has changed state.
self._state = False
self.hass.async_add_job(self.async_update_ha_state())
@asyncio.coroutine
def async_close_cover(self, **kwargs):
"""Move the cover down.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass, self._command_topic, self._payload_close, self._qos,
self._retain)
if self._optimistic:
# Optimistically assume that cover has changed state.
self._state = True
self.hass.async_add_job(self.async_update_ha_state())
@asyncio.coroutine
def async_stop_cover(self, **kwargs):
"""Stop the device.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass, self._command_topic, self._payload_stop, self._qos,
self._retain)
@asyncio.coroutine
def async_open_cover_tilt(self, **kwargs):
"""Tilt the cover open."""
mqtt.async_publish(self.hass, self._tilt_command_topic,
self._tilt_open_position, self._qos,
self._retain)
if self._tilt_optimistic:
self._tilt_value = self._tilt_open_position
self.hass.async_add_job(self.async_update_ha_state())
@asyncio.coroutine
def async_close_cover_tilt(self, **kwargs):
"""Tilt the cover closed."""
mqtt.async_publish(self.hass, self._tilt_command_topic,
self._tilt_closed_position, self._qos,
self._retain)
if self._tilt_optimistic:
self._tilt_value = self._tilt_closed_position
self.hass.async_add_job(self.async_update_ha_state())
@asyncio.coroutine
def async_set_cover_tilt_position(self, **kwargs):
"""Move the cover tilt to a specific position."""
if ATTR_TILT_POSITION not in kwargs:
return
position = float(kwargs[ATTR_TILT_POSITION])
# The position needs to be between min and max
level = self.find_in_range_from_percent(position)
mqtt.async_publish(self.hass, self._tilt_command_topic,
level, self._qos, self._retain)
@asyncio.coroutine
def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
if ATTR_POSITION in kwargs:
position = kwargs[ATTR_POSITION]
if self._set_position_template is not None:
try:
position = self._set_position_template.async_render(
**kwargs)
except TemplateError as ex:
_LOGGER.error(ex)
self._state = None
mqtt.async_publish(self.hass, self._position_topic,
position, self._qos, self._retain)
def find_percentage_in_range(self, position):
"""Find the 0-100% value within the specified range."""
# the range of motion as defined by the min max values
tilt_range = self._tilt_max - self._tilt_min
# offset to be zero based
offset_position = position - self._tilt_min
# the percentage value within the range
position_percentage = float(offset_position) / tilt_range * 100.0
if self._tilt_invert:
return 100 - position_percentage
return position_percentage
def find_in_range_from_percent(self, percentage):
"""
Find the adjusted value for 0-100% within the specified range.
        If the range is 80-180 and the percentage is 90,
        this method determines the value to send on the topic
        by offsetting the max and min, getting the percentage value and
        returning the offset.
"""
offset = self._tilt_min
tilt_range = self._tilt_max - self._tilt_min
position = round(tilt_range * (percentage / 100.0))
position += offset
if self._tilt_invert:
position = self._tilt_max - position + offset
return position
|
robert-impey/tree-sorter | refs/heads/master | test_arelinessortedtree.py | 1 | #!/usr/bin/env python3
from treesorting import *
import unittest
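# These tests exercise two treesorting entry points (signatures inferred from
# the calls below): are_lines_sorted_tree(lines) -> bool and
# is_file_sorted_tree(path) -> bool.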
class TestEmpty(unittest.TestCase):
def setUp(self):
self.lines = []
def test_empty_set_is_sorted(self):
lines_are_sorted_tree = are_lines_sorted_tree(self.lines)
self.assertTrue(lines_are_sorted_tree)
class TestSingleTree(unittest.TestCase):
def setUp(self):
self.lines = ['foo']
def test_text(self):
lines_are_sorted_tree = are_lines_sorted_tree(self.lines)
self.assertTrue(lines_are_sorted_tree)
class TestOneDeepInOrder(unittest.TestCase):
def setUp(self):
self.lines = []
self.lines.append("foo")
self.lines.append(" bar")
self.lines.append(" gaz")
def test_assessment(self):
self.assertTrue(are_lines_sorted_tree(self.lines))
class TestOneDeepOutOfOrder(unittest.TestCase):
def setUp(self):
self.lines = []
self.lines.append("foo")
self.lines.append(" gaz")
self.lines.append(" bar")
def test_assessment(self):
self.assertFalse(are_lines_sorted_tree(self.lines))
class TestTwoDeepInOrder(unittest.TestCase):
def setUp(self):
self.lines = []
self.lines.append('A')
self.lines.append(" Aa")
self.lines.append(" A1")
self.lines.append(" A2")
self.lines.append(" Ab")
self.lines.append(" A1")
self.lines.append(" A2")
self.lines.append('B')
self.lines.append(" Ba")
self.lines.append(" B1")
self.lines.append(" B2")
self.lines.append(" Bb")
self.lines.append(" B1")
self.lines.append(" B2")
def test_assessment(self):
self.assertTrue(are_lines_sorted_tree(self.lines))
class TestTwoDeepOutOfOrder(unittest.TestCase):
def setUp(self):
self.lines = []
self.lines.append('A')
self.lines.append(" Ab")
self.lines.append(" A2")
self.lines.append(" A1")
self.lines.append(" Aa")
self.lines.append(" A2")
self.lines.append(" A1")
self.lines.append('B')
self.lines.append(" Bb")
self.lines.append(" B2")
self.lines.append(" B1")
self.lines.append(" Ba")
self.lines.append(" B2")
self.lines.append(" B1")
def test_assessment(self):
self.assertFalse(are_lines_sorted_tree(self.lines))
class TestTreeFilesWithGapsInOrder(unittest.TestCase):
def test_assessment(self):
self.assertTrue(is_file_sorted_tree('fixtures/two-deep-with-gaps-sorted.txt'))
class TestTreeFilesWithGapsOutOfOrder(unittest.TestCase):
def test_assessment(self):
self.assertFalse(is_file_sorted_tree('fixtures/two-deep-with-gaps.txt'))
class TestTreeFilesInOrderAfterDrop(unittest.TestCase):
def test_assessment(self):
self.assertTrue(is_file_sorted_tree('fixtures/in-order-after-depth-drop.txt'))
class TestTreeFilesOutOfOrderAfterDrop(unittest.TestCase):
def test_assessment(self):
self.assertFalse(is_file_sorted_tree('fixtures/out-of-order-after-depth-drop.txt'))
|
exelearning/iteexe | refs/heads/master | nevow/testutil.py | 14 | # Copyright (c) 2004 Divmod.
# See LICENSE for details.
from nevow import compy, inevow
class FakeChannel:
def __init__(self, site):
self.site = site
class FakeSite:
pass
class FakeSession(compy.Componentized):
def __init__(self, avatar):
compy.Componentized.__init__(self)
self.avatar = avatar
def getLoggedInRoot(self):
return self.avatar
fs = FakeSession(None)
class FakeRequest(compy.Componentized):
__implements__ = inevow.IRequest,
args = {}
failure = None
context = None
def __init__(self, headers=None, args=None, avatar=None, uri=''):
compy.Componentized.__init__(self)
self.uri = uri
self.prepath = ['']
self.postpath = uri.split('?')[0].split('/')
self.headers = headers or {}
self.args = args or {}
self.sess = FakeSession(avatar)
self.site = FakeSite()
self.received_headers = {}
def URLPath(self):
from nevow import url
return url.URL.fromString('')
def getSession(self):
return self.sess
v = ''
def write(self, x):
self.v += x
finished=False
def finish(self):
self.finished = True
def getHeader(self, key):
d = {
'referer': '/',
}
return d[key]
def setHeader(self, key, val):
self.headers[key] = val
def redirect(self, url):
self.redirected_to = url
def getRootURL(self):
return ''
def processingFailed(self, f):
self.failure = f
def setResponseCode(self, code):
self.code = code
def prePathURL(self):
return 'http://localhost/%s'%'/'.join(self.prepath)
def getClientIP(self):
return '127.0.0.1'
try:
from twisted.trial import unittest
FailTest = unittest.FailTest
except (ImportError, AttributeError):
import unittest
class FailTest(Exception): pass
import sys
class TestCase(unittest.TestCase):
hasBools = (sys.version_info >= (2,3))
_assertions = 0
# This should be migrated to Twisted.
def failUnlessSubstring(self, containee, container, msg=None):
self._assertions += 1
if container.find(containee) == -1:
raise unittest.FailTest, (msg or "%r not in %r" % (containee, container))
def failIfSubstring(self, containee, container, msg=None):
self._assertions += 1
if container.find(containee) != -1:
raise unittest.FailTest, (msg or "%r in %r" % (containee, container))
assertSubstring = failUnlessSubstring
assertNotSubstring = failIfSubstring
def assertNotIdentical(self, first, second, msg=None):
self._assertions += 1
if first is second:
raise FailTest, (msg or '%r is %r' % (first, second))
def failIfIn(self, containee, container, msg=None):
self._assertions += 1
if containee in container:
raise FailTest, (msg or "%r in %r" % (containee, container))
def assertApproximates(self, first, second, tolerance, msg=None):
self._assertions += 1
if abs(first - second) > tolerance:
raise FailTest, (msg or "%s ~== %s" % (first, second))
if not hasattr(TestCase, 'mktemp'):
def mktemp(self):
import tempfile
return tempfile.mktemp()
TestCase.mktemp = mktemp
|
Seagate/swift | refs/heads/master | swift/common/middleware/ratelimit.py | 42 | # Copyright (c) 2010-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from swift import gettext_ as _
import eventlet
from swift.common.utils import cache_from_env, get_logger, register_swift_info
from swift.proxy.controllers.base import get_account_info, get_container_info
from swift.common.memcached import MemcacheConnectionError
from swift.common.swob import Request, Response
def interpret_conf_limits(conf, name_prefix, info=None):
"""
Parses general parms for rate limits looking for things that
start with the provided name_prefix within the provided conf
and returns lists for both internal use and for /info
:param conf: conf dict to parse
:param name_prefix: prefix of config parms to look for
:param info: set to return extra stuff for /info registration
"""
conf_limits = []
for conf_key in conf:
if conf_key.startswith(name_prefix):
cont_size = int(conf_key[len(name_prefix):])
rate = float(conf[conf_key])
conf_limits.append((cont_size, rate))
conf_limits.sort()
ratelimits = []
conf_limits_info = list(conf_limits)
while conf_limits:
cur_size, cur_rate = conf_limits.pop(0)
if conf_limits:
next_size, next_rate = conf_limits[0]
slope = (float(next_rate) - float(cur_rate)) \
/ (next_size - cur_size)
def new_scope(cur_size, slope, cur_rate):
                # bind cur_size, slope and cur_rate per segment so the
                # returned lambda does not late-bind the loop variables
return lambda x: (x - cur_size) * slope + cur_rate
line_func = new_scope(cur_size, slope, cur_rate)
else:
line_func = lambda x: cur_rate
ratelimits.append((cur_size, cur_rate, line_func))
if info is None:
return ratelimits
else:
return ratelimits, conf_limits_info
def get_maxrate(ratelimits, size):
"""
Returns number of requests allowed per second for given size.
"""
last_func = None
if size:
size = int(size)
for ratesize, rate, func in ratelimits:
if size < ratesize:
break
last_func = func
if last_func:
return last_func(size)
return None
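# Worked example (hypothetical config, for illustration only): with
#   container_ratelimit_100 = 100
#   container_ratelimit_1000 = 10
# interpret_conf_limits(conf, 'container_ratelimit_') produces segments that
# get_maxrate() interpolates linearly:
#   get_maxrate(limits, 100)  -> 100.0
#   get_maxrate(limits, 550)  -> 55.0   (midway along the 100..1000 segment)
#   get_maxrate(limits, 5000) -> 10.0   (flat beyond the last point)
#   get_maxrate(limits, 50)   -> None   (below the smallest configured size)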
class MaxSleepTimeHitError(Exception):
pass
class RateLimitMiddleware(object):
"""
Rate limiting middleware
Rate limits requests on both an Account and Container level. Limits are
configurable.
"""
BLACK_LIST_SLEEP = 1
def __init__(self, app, conf, logger=None):
self.app = app
self.logger = logger or get_logger(conf, log_route='ratelimit')
self.memcache_client = None
self.account_ratelimit = float(conf.get('account_ratelimit', 0))
self.max_sleep_time_seconds = \
float(conf.get('max_sleep_time_seconds', 60))
self.log_sleep_time_seconds = \
float(conf.get('log_sleep_time_seconds', 0))
self.clock_accuracy = int(conf.get('clock_accuracy', 1000))
self.rate_buffer_seconds = int(conf.get('rate_buffer_seconds', 5))
self.ratelimit_whitelist = \
[acc.strip() for acc in
conf.get('account_whitelist', '').split(',') if acc.strip()]
self.ratelimit_blacklist = \
[acc.strip() for acc in
conf.get('account_blacklist', '').split(',') if acc.strip()]
self.container_ratelimits = interpret_conf_limits(
conf, 'container_ratelimit_')
self.container_listing_ratelimits = interpret_conf_limits(
conf, 'container_listing_ratelimit_')
def get_container_size(self, env):
rv = 0
container_info = get_container_info(
env, self.app, swift_source='RL')
if isinstance(container_info, dict):
rv = container_info.get(
'object_count', container_info.get('container_size', 0))
return rv
def get_ratelimitable_key_tuples(self, req, account_name,
container_name=None, obj_name=None,
global_ratelimit=None):
"""
Returns a list of key (used in memcache), ratelimit tuples. Keys
should be checked in order.
:param req: swob request
:param account_name: account name from path
:param container_name: container name from path
:param obj_name: object name from path
:param global_ratelimit: this account has an account wide
ratelimit on all writes combined
"""
keys = []
# COPYs are not limited
if self.account_ratelimit and \
account_name and container_name and not obj_name and \
req.method in ('PUT', 'DELETE'):
keys.append(("ratelimit/%s" % account_name,
self.account_ratelimit))
if account_name and container_name and obj_name and \
req.method in ('PUT', 'DELETE', 'POST', 'COPY'):
container_size = self.get_container_size(req.environ)
container_rate = get_maxrate(
self.container_ratelimits, container_size)
if container_rate:
keys.append((
"ratelimit/%s/%s" % (account_name, container_name),
container_rate))
if account_name and container_name and not obj_name and \
req.method == 'GET':
container_size = self.get_container_size(req.environ)
container_rate = get_maxrate(
self.container_listing_ratelimits, container_size)
if container_rate:
keys.append((
"ratelimit_listing/%s/%s" % (account_name, container_name),
container_rate))
if account_name and req.method in ('PUT', 'DELETE', 'POST', 'COPY'):
if global_ratelimit:
try:
global_ratelimit = float(global_ratelimit)
if global_ratelimit > 0:
keys.append((
"ratelimit/global-write/%s" % account_name,
global_ratelimit))
except ValueError:
pass
return keys
def _get_sleep_time(self, key, max_rate):
'''
Returns the amount of time (a float in seconds) that the app
should sleep.
:param key: a memcache key
:param max_rate: maximum rate allowed in requests per second
:raises: MaxSleepTimeHitError if max sleep time is exceeded.
'''
try:
now_m = int(round(time.time() * self.clock_accuracy))
time_per_request_m = int(round(self.clock_accuracy / max_rate))
running_time_m = self.memcache_client.incr(
key, delta=time_per_request_m)
need_to_sleep_m = 0
if (now_m - running_time_m >
self.rate_buffer_seconds * self.clock_accuracy):
next_avail_time = int(now_m + time_per_request_m)
self.memcache_client.set(key, str(next_avail_time),
serialize=False)
else:
need_to_sleep_m = \
max(running_time_m - now_m - time_per_request_m, 0)
max_sleep_m = self.max_sleep_time_seconds * self.clock_accuracy
if max_sleep_m - need_to_sleep_m <= self.clock_accuracy * 0.01:
# treat as no-op decrement time
self.memcache_client.decr(key, delta=time_per_request_m)
raise MaxSleepTimeHitError(
"Max Sleep Time Exceeded: %.2f" %
(float(need_to_sleep_m) / self.clock_accuracy))
return float(need_to_sleep_m) / self.clock_accuracy
except MemcacheConnectionError:
return 0
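    # Illustrative arithmetic (hypothetical numbers): with clock_accuracy=1000
    # and max_rate=10, time_per_request_m is 100 "clock units" (0.1s).  If the
    # memcache counter sits 250 units ahead of now after the incr, the caller
    # is told to sleep max(250 - 100, 0) = 150 units, i.e. 0.15 seconds.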
def handle_ratelimit(self, req, account_name, container_name, obj_name):
'''
Performs rate limiting and account white/black listing. Sleeps
if necessary. If self.memcache_client is not set, immediately returns
None.
:param account_name: account name from path
:param container_name: container name from path
:param obj_name: object name from path
'''
if not self.memcache_client:
return None
try:
account_info = get_account_info(req.environ, self.app,
swift_source='RL')
account_global_ratelimit = \
account_info.get('sysmeta', {}).get('global-write-ratelimit')
except ValueError:
account_global_ratelimit = None
if account_name in self.ratelimit_whitelist or \
account_global_ratelimit == 'WHITELIST':
return None
if account_name in self.ratelimit_blacklist or \
account_global_ratelimit == 'BLACKLIST':
self.logger.error(_('Returning 497 because of blacklisting: %s'),
account_name)
eventlet.sleep(self.BLACK_LIST_SLEEP)
return Response(status='497 Blacklisted',
body='Your account has been blacklisted',
request=req)
for key, max_rate in self.get_ratelimitable_key_tuples(
req, account_name, container_name=container_name,
obj_name=obj_name, global_ratelimit=account_global_ratelimit):
try:
need_to_sleep = self._get_sleep_time(key, max_rate)
if self.log_sleep_time_seconds and \
need_to_sleep > self.log_sleep_time_seconds:
self.logger.warning(
_("Ratelimit sleep log: %(sleep)s for "
"%(account)s/%(container)s/%(object)s"),
{'sleep': need_to_sleep, 'account': account_name,
'container': container_name, 'object': obj_name})
if need_to_sleep > 0:
eventlet.sleep(need_to_sleep)
except MaxSleepTimeHitError as e:
self.logger.error(
_('Returning 498 for %(meth)s to %(acc)s/%(cont)s/%(obj)s '
'. Ratelimit (Max Sleep) %(e)s'),
{'meth': req.method, 'acc': account_name,
'cont': container_name, 'obj': obj_name, 'e': str(e)})
error_resp = Response(status='498 Rate Limited',
body='Slow down', request=req)
return error_resp
return None
def __call__(self, env, start_response):
"""
WSGI entry point.
Wraps env in swob.Request object and passes it down.
:param env: WSGI environment dictionary
:param start_response: WSGI callable
"""
req = Request(env)
if self.memcache_client is None:
self.memcache_client = cache_from_env(env)
if not self.memcache_client:
self.logger.warning(
_('Warning: Cannot ratelimit without a memcached client'))
return self.app(env, start_response)
try:
version, account, container, obj = req.split_path(1, 4, True)
except ValueError:
return self.app(env, start_response)
ratelimit_resp = self.handle_ratelimit(req, account, container, obj)
if ratelimit_resp is None:
return self.app(env, start_response)
else:
return ratelimit_resp(env, start_response)
def filter_factory(global_conf, **local_conf):
"""
paste.deploy app factory for creating WSGI proxy apps.
"""
conf = global_conf.copy()
conf.update(local_conf)
account_ratelimit = float(conf.get('account_ratelimit', 0))
max_sleep_time_seconds = \
float(conf.get('max_sleep_time_seconds', 60))
container_ratelimits, cont_limit_info = interpret_conf_limits(
conf, 'container_ratelimit_', info=1)
container_listing_ratelimits, cont_list_limit_info = \
interpret_conf_limits(conf, 'container_listing_ratelimit_', info=1)
# not all limits are exposed (intentionally)
register_swift_info('ratelimit',
account_ratelimit=account_ratelimit,
max_sleep_time_seconds=max_sleep_time_seconds,
container_ratelimits=cont_limit_info,
container_listing_ratelimits=cont_list_limit_info)
def limit_filter(app):
return RateLimitMiddleware(app, conf)
return limit_filter
|
tgsd96/gargnotes | refs/heads/master | venv/lib/python2.7/site-packages/django/contrib/syndication/__init__.py | 808 | default_app_config = 'django.contrib.syndication.apps.SyndicationConfig'
|
bratatidas9/Impala-1 | refs/heads/cdh5-trunk | thirdparty/thrift-0.9.0/contrib/zeromq/TZmqServer.py | 108 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import logging
import zmq
import thrift.server.TServer
import thrift.transport.TTransport
class TZmqServer(thrift.server.TServer.TServer):
def __init__(self, processor, ctx, endpoint, sock_type):
thrift.server.TServer.TServer.__init__(self, processor, None)
self.zmq_type = sock_type
self.socket = ctx.socket(sock_type)
self.socket.bind(endpoint)
def serveOne(self):
msg = self.socket.recv()
itrans = thrift.transport.TTransport.TMemoryBuffer(msg)
otrans = thrift.transport.TTransport.TMemoryBuffer()
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
try:
self.processor.process(iprot, oprot)
except Exception:
logging.exception("Exception while processing request")
# Fall through and send back a response, even if empty or incomplete.
if self.zmq_type == zmq.REP:
msg = otrans.getvalue()
self.socket.send(msg)
def serve(self):
while True:
self.serveOne()
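# Minimal usage sketch (assumptions: `processor` is a generated Thrift
# processor instance; the endpoint and socket type are illustrative):
#
#   ctx = zmq.Context()
#   server = TZmqServer(processor, ctx, "tcp://*:9090", zmq.REP)
#   server.serve()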
class TZmqMultiServer(object):
def __init__(self):
self.servers = []
def serveOne(self, timeout = -1):
self._serveActive(self._setupPoll(), timeout)
def serveForever(self):
poll_info = self._setupPoll()
while True:
self._serveActive(poll_info, -1)
def _setupPoll(self):
server_map = {}
poller = zmq.Poller()
for server in self.servers:
server_map[server.socket] = server
poller.register(server.socket, zmq.POLLIN)
return (server_map, poller)
def _serveActive(self, poll_info, timeout):
(server_map, poller) = poll_info
ready = dict(poller.poll())
for sock, state in ready.items():
assert (state & zmq.POLLIN) != 0
server_map[sock].serveOne()
|
GoogleCloudPlatform/dataflow-speech-redaction | refs/heads/master | srf-longrun-job-dataflow/srflongrunjobdataflow.py | 1 | # Copyright 2021 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START SRF pubsub_to_cloud_storage]
import argparse
import logging
import json
import time
import apache_beam as beam
import google.cloud.dlp
import uuid
from apache_beam.io import filesystems
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.options.pipeline_options import GoogleCloudOptions
from google.cloud.dlp import DlpServiceClient
class WriteToSeparateFiles(beam.DoFn):
def __init__(self, outdir):
self.outdir = outdir
def process(self, element):
x = uuid.uuid4()
record = json.loads(element)
file_name = record['filename'].split("/")
writer = filesystems.FileSystems.create(self.outdir + file_name[-1] + "_" + str(x)[:8] + ".json")
writer.write(json.dumps(record).encode("utf8"))
writer.close()
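    # Naming scheme example (illustrative): an element whose JSON contains
    # {"filename": "gs://bucket/audio.flac"} with outdir "gs://bucket/out/"
    # is written to "gs://bucket/out/audio.flac_1a2b3c4d.json", the suffix
    # being the first 8 characters of a random uuid4.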
# function to get STT data from long audio file using asynchronous speech recognition
def stt_output_response(data):
from oauth2client.client import GoogleCredentials
from googleapiclient import discovery
credentials = GoogleCredentials.get_application_default()
pub_sub_data = json.loads(data)
speech_service = discovery.build('speech', 'v1p1beta1', credentials=credentials)
get_operation = speech_service.operations().get(name=pub_sub_data['sttnameid'])
response = get_operation.execute()
# handle polling of STT
if pub_sub_data['duration'] != 'NA':
sleep_duration = round(int(float(pub_sub_data['duration'])) / 2)
else:
sleep_duration = 5
logging.info('Sleeping for: %s', sleep_duration)
time.sleep(sleep_duration)
retry_count = 10
while retry_count > 0 and not response.get('done', False):
retry_count -= 1
time.sleep(120)
response = get_operation.execute()
# return response to include STT data and agent search word
response_list = [response,
pub_sub_data['filename']
]
return response_list
# function to parse and enrich the stt_output_response result
def stt_parse_response(stt_data):
parse_stt_output_response = {
'filename': stt_data[1],
'transcript': None,
'words': [],
'dlp': [],
}
string_transcript = ''
# get transcript from stt_data
for i in stt_data[0]['response']['results']:
if 'transcript' in i['alternatives'][0]:
string_transcript += str(i['alternatives'][0]['transcript']) + ' '
parse_stt_output_response['transcript'] = string_transcript[:-1] # remove the ending whitespace
for element in stt_data[0]['response']['results']:
for word in element['alternatives'][0]['words']:
parse_stt_output_response['words'].append(
{'word': word['word'], 'startsecs': word['startTime'].strip('s'),
'endsecs': word['endTime'].strip('s')})
return parse_stt_output_response
def destination(element):
return json.loads(element)["filename"]
# function to detect sensitive data in the transcript via Cloud DLP
def redact_text(data, project):
logging.info(data)
dlp = google.cloud.dlp_v2.DlpServiceClient()
parent = dlp.project_path(project)
response = dlp.list_info_types('en-US')
# This will detect PII data for the info types listed
# https://cloud.google.com/dlp/docs/infotypes-reference
info_types = ["PERSON_NAME",
"PHONE_NUMBER",
"ORGANIZATION_NAME",
"FIRST_NAME",
"LAST_NAME",
"EMAIL_ADDRESS",
"DATE_OF_BIRTH",
"EMAIL_ADDRESS",
"US_SOCIAL_SECURITY_NUMBER",
"STREET_ADDRESS"
]
info_types = [{"name": info_type} for info_type in info_types]
inspect_config = {
"info_types": info_types,
"include_quote":True
}
logging.info(data['transcript'])
item = {"value": data['transcript']}
response = dlp.inspect_content(
parent,
inspect_config=inspect_config,
item=item,
)
logging.info(response)
if response.result.findings:
for finding in response.result.findings:
try:
if finding.quote:
print("Quote: {}".format(finding.quote))
data['dlp'].append(finding.quote)
except AttributeError:
pass
else:
print("No findings.")
return data
def run(argv=None, save_main_session=True):
"""Build and run the pipeline."""
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument(
'--input_topic',
help=('Input PubSub topic of the form '
'"projects/<PROJECT>/topics/<TOPIC>".'))
group.add_argument(
'--input_subscription',
help=('Input PubSub subscription of the form '
'"projects/<PROJECT>/subscriptions/<SUBSCRIPTION>."'))
parser.add_argument('--output', required=True,
help='Output BQ table to write results to '
'"PROJECT_ID:DATASET.TABLE"')
known_args, pipeline_args = parser.parse_known_args(argv)
pipeline_options = PipelineOptions(pipeline_args)
project_id = pipeline_options.view_as(GoogleCloudOptions).project
pipeline_options.view_as(SetupOptions).save_main_session = save_main_session
pipeline_options.view_as(StandardOptions).streaming = True
p = beam.Pipeline(options=pipeline_options)
# Read from PubSub into a PCollection.
if known_args.input_subscription:
messages = (p
| beam.io.ReadFromPubSub(
subscription=known_args.input_subscription)
.with_output_types(bytes))
else:
messages = (p
| beam.io.ReadFromPubSub(topic=known_args.input_topic)
.with_output_types(bytes))
decode_messages = messages | 'DecodePubSubMessages' >> beam.Map(lambda x: x.decode('utf-8'))
# Get STT data from function for long audio file using asynchronous speech recognition
stt_output = decode_messages | 'SpeechToTextOutput' >> beam.Map(stt_output_response)
# Parse and enrich stt_output response
parse_stt_output = stt_output | 'ParseSpeechToText' >> beam.Map(stt_parse_response)
# Google Cloud DLP redaction for all info types
dlp_output = parse_stt_output | 'FindDLP' >> beam.Map(lambda j: redact_text(j, project_id))
# Convert to JSON
json_output = dlp_output | 'JSONDumps' >> beam.Map(json.dumps)
# Write findings to Cloud Storage
json_output | 'WriteFindings' >> beam.ParDo(WriteToSeparateFiles(known_args.output))
p.run()
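# Example invocation (flags as defined above; project, subscription and
# bucket names are hypothetical):
#
#   python srflongrunjobdataflow.py \
#       --project my-project --runner DataflowRunner \
#       --input_subscription projects/my-project/subscriptions/stt-sub \
#       --output gs://my-bucket/srf-findings/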
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
run()
# [END SRF pubsub_to_cloud_storage] |
peterjoel/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/third_party/h2/examples/fragments/server_upgrade_fragment.py | 25 | # -*- coding: utf-8 -*-
"""
Server Plaintext Upgrade
~~~~~~~~~~~~~~~~~~~~~~~~
This example code fragment demonstrates how to set up an HTTP/2 server that uses
the plaintext HTTP Upgrade mechanism to negotiate HTTP/2 connectivity. For
maximum explanatory value it uses the synchronous socket API that comes with
the Python standard library. In production code you will want to use an actual
HTTP/1.1 server library if possible.
This code requires Python 3.5 or later.
"""
import h2.config
import h2.connection
import re
import socket
def establish_tcp_connection():
"""
This function establishes a server-side TCP connection. How it works isn't
very important to this example.
"""
bind_socket = socket.socket()
bind_socket.bind(('', 443))
bind_socket.listen(5)
return bind_socket.accept()[0]
def receive_initial_request(connection):
"""
We're going to receive a request. For the sake of this example, we're going
to assume that the first request has no body. If it doesn't have the
Upgrade: h2c header field and the HTTP2-Settings header field, we'll throw
errors.
In production code, you should use a proper HTTP/1.1 parser and actually
serve HTTP/1.1 requests!
Returns the value of the HTTP2-Settings header field.
"""
data = b''
while not data.endswith(b'\r\n\r\n'):
data += connection.recv(8192)
match = re.search(b'Upgrade: h2c\r\n', data)
    if match is None:
raise RuntimeError("HTTP/2 upgrade not requested!")
# We need to look for the HTTP2-Settings header field. Again, in production
# code you shouldn't use regular expressions for this, but it's good enough
# for the example.
match = re.search(b'HTTP2-Settings: (\\S+)\r\n', data)
    if match is None:
raise RuntimeError("HTTP2-Settings header field not present!")
return match.group(1)
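# For reference, the kind of HTTP/1.1 request this parser accepts looks like
# the following (header values illustrative; HTTP2-Settings carries a
# base64url-encoded HTTP/2 SETTINGS payload):
#
#   GET / HTTP/1.1\r\n
#   Host: example.com\r\n
#   Connection: Upgrade, HTTP2-Settings\r\n
#   Upgrade: h2c\r\n
#   HTTP2-Settings: AAMAAABkAARAAAAAAAIAAAAA\r\n
#   \r\n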
def send_upgrade_response(connection):
"""
This function writes the 101 Switching Protocols response.
"""
response = (
b"HTTP/1.1 101 Switching Protocols\r\n"
b"Upgrade: h2c\r\n"
b"\r\n"
)
connection.sendall(response)
def main():
"""
The server upgrade flow.
"""
    # Step 1: Establish the TCP connection.
connection = establish_tcp_connection()
# Step 2: Read the response. We expect this to request an upgrade.
settings_header_value = receive_initial_request(connection)
# Step 3: Create a H2Connection object in server mode, and pass it the
# value of the HTTP2-Settings header field.
config = h2.config.H2Configuration(client_side=False)
h2_connection = h2.connection.H2Connection(config=config)
h2_connection.initiate_upgrade_connection(
settings_header=settings_header_value
)
# Step 4: Send the 101 Switching Protocols response.
send_upgrade_response(connection)
# Step 5: Send pending HTTP/2 data.
connection.sendall(h2_connection.data_to_send())
# At this point, you can enter your main loop. The first step has to be to
# send the response to the initial HTTP/1.1 request you received on stream
# 1.
main_loop()
|
Andrew-McNab-UK/DIRAC | refs/heads/integration | tests/Integration/WorkloadManagementSystem/exe-script.py | 22 | #!/usr/bin/env python
'''Script to run Executable application'''
import sys
from os import system
# Main
if __name__ == '__main__':
sys.exit(system('''echo Hello World''')/256)
|
botswana-harvard/getresults-receive | refs/heads/develop | getresults_receive/tests/factories/receive_factory.py | 1 | import factory
from getresults_receive.models import Receive
from django.utils import timezone
class ReceiveFactory(factory.DjangoModelFactory):
class Meta:
model = Receive
receive_identifier = factory.Sequence(lambda n: 'APA{0}K-{0}'.format(n))
receive_datetime = timezone.now()
collection_datetime = timezone.now()
patient = factory.Sequence(lambda n: '099-21444678-{0}'.format(n))
clinician_initials = 'DD'
specimen_type = 'WB'
protocol_number = 'BHHRL'
batch_identifier = factory.Sequence(lambda n: 'XXHT-{0}'.format(n))
specimen_reference = factory.Sequence(lambda n: 'MMA{0}K-{0}'.format(n))
site_code = '02'
tube_count = 1
|
madflow/weblate | refs/heads/master | weblate/trans/models/changes.py | 7 | # -*- coding: utf-8 -*-
#
# Copyright ยฉ 2012 - 2015 Michal ฤihaล <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models
from django.contrib.auth.models import User
from django.db.models import Count, Q
from django.utils.translation import ugettext as _, ugettext_lazy
from django.utils import timezone
from weblate.trans.models.project import Project
from weblate.accounts.avatar import get_user_display
class ChangeManager(models.Manager):
# pylint: disable=W0232
def content(self, prefetch=False):
'''
Returns queryset with content changes.
'''
base = self
if prefetch:
base = base.prefetch()
return base.filter(
action__in=Change.ACTIONS_CONTENT,
user__isnull=False,
)
def count_stats(self, days, step, dtstart, base):
        '''
        Counts the number of changes in the given dataset and period,
        grouped into intervals of step days.
        '''
# Count number of changes
result = []
for dummy in xrange(0, days, step):
# Calculate interval
int_start = dtstart
int_end = int_start + timezone.timedelta(days=step)
# Count changes
int_base = base.filter(timestamp__range=(int_start, int_end))
count = int_base.aggregate(Count('id'))
# Append to result
result.append((int_start, count['id__count']))
# Advance to next interval
dtstart = int_end
return result
def base_stats(self, days, step,
project=None, subproject=None, translation=None,
language=None, user=None):
'''
Core of daily/weekly/monthly stats calculation.
'''
# Get range (actually start)
dtstart = timezone.now().date() - timezone.timedelta(days=days + 1)
# Base for filtering
base = self.all()
# Filter by translation/project
if translation is not None:
base = base.filter(translation=translation)
elif subproject is not None:
base = base.filter(translation__subproject=subproject)
elif project is not None:
base = base.filter(translation__subproject__project=project)
# Filter by language
if language is not None:
base = base.filter(translation__language=language)
# Filter by language
if user is not None:
base = base.filter(user=user)
return self.count_stats(days, step, dtstart, base)
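    # Illustrative call (hypothetical arguments): 30 days of weekly buckets
    # for a single project yields (interval_start, change_count) pairs, e.g.
    #   Change.objects.base_stats(30, 7, project=some_project)
    #   -> [(date_1, 12), (date_2, 30), (date_3, 7), (date_4, 25), ...]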
def prefetch(self):
'''
        Fetches related fields in big chunks to avoid loading them
individually.
'''
return self.prefetch_related(
'user', 'translation', 'unit', 'dictionary',
'translation__subproject', 'translation__language',
'translation__subproject__project',
)
def last_changes(self, user):
'''
Prefilters Changes by ACL for users and fetches related fields
for last changes display.
'''
result = self.prefetch()
acl_projects, filtered = Project.objects.get_acl_status(user)
if filtered:
result = result.filter(
Q(subproject__project__in=acl_projects) |
Q(dictionary__project__in=acl_projects)
)
return result
class Change(models.Model):
ACTION_UPDATE = 0
ACTION_COMPLETE = 1
ACTION_CHANGE = 2
ACTION_COMMENT = 3
ACTION_SUGGESTION = 4
ACTION_NEW = 5
ACTION_AUTO = 6
ACTION_ACCEPT = 7
ACTION_REVERT = 8
ACTION_UPLOAD = 9
ACTION_DICTIONARY_NEW = 10
ACTION_DICTIONARY_EDIT = 11
ACTION_DICTIONARY_UPLOAD = 12
ACTION_NEW_SOURCE = 13
ACTION_LOCK = 14
ACTION_UNLOCK = 15
ACTION_DUPLICATE_STRING = 16
ACTION_COMMIT = 17
ACTION_PUSH = 18
ACTION_RESET = 19
ACTION_MERGE = 20
ACTION_REBASE = 21
ACTION_FAILED_MERGE = 22
ACTION_FAILED_REBASE = 23
ACTION_CHOICES = (
(ACTION_UPDATE, ugettext_lazy('Resource update')),
(ACTION_COMPLETE, ugettext_lazy('Translation completed')),
(ACTION_CHANGE, ugettext_lazy('Translation changed')),
(ACTION_NEW, ugettext_lazy('New translation')),
(ACTION_COMMENT, ugettext_lazy('Comment added')),
(ACTION_SUGGESTION, ugettext_lazy('Suggestion added')),
(ACTION_AUTO, ugettext_lazy('Automatic translation')),
(ACTION_ACCEPT, ugettext_lazy('Suggestion accepted')),
(ACTION_REVERT, ugettext_lazy('Translation reverted')),
(ACTION_UPLOAD, ugettext_lazy('Translation uploaded')),
(ACTION_DICTIONARY_NEW, ugettext_lazy('Glossary added')),
(ACTION_DICTIONARY_EDIT, ugettext_lazy('Glossary updated')),
(ACTION_DICTIONARY_UPLOAD, ugettext_lazy('Glossary uploaded')),
(ACTION_NEW_SOURCE, ugettext_lazy('New source string')),
(ACTION_LOCK, ugettext_lazy('Component locked')),
(ACTION_UNLOCK, ugettext_lazy('Component unlocked')),
(ACTION_DUPLICATE_STRING, ugettext_lazy('Detected duplicate string')),
        (ACTION_COMMIT, ugettext_lazy('Committed changes')),
(ACTION_PUSH, ugettext_lazy('Pushed changes')),
(ACTION_RESET, ugettext_lazy('Reset repository')),
(ACTION_MERGE, ugettext_lazy('Merged repository')),
(ACTION_REBASE, ugettext_lazy('Rebased repository')),
(ACTION_FAILED_MERGE, ugettext_lazy('Failed merge on repository')),
(ACTION_FAILED_REBASE, ugettext_lazy('Failed rebase on repository')),
)
ACTIONS_SUBPROJECT = set((
ACTION_LOCK,
ACTION_UNLOCK,
ACTION_DUPLICATE_STRING,
ACTION_PUSH,
ACTION_RESET,
ACTION_MERGE,
ACTION_REBASE,
ACTION_FAILED_MERGE,
ACTION_FAILED_REBASE,
))
ACTIONS_REVERTABLE = set((
ACTION_ACCEPT,
ACTION_REVERT,
ACTION_CHANGE,
ACTION_NEW,
))
ACTIONS_CONTENT = set((
ACTION_CHANGE,
ACTION_NEW,
ACTION_AUTO,
ACTION_ACCEPT,
ACTION_REVERT,
ACTION_UPLOAD,
))
ACTIONS_REPOSITORY = set((
ACTION_PUSH,
ACTION_RESET,
ACTION_MERGE,
ACTION_REBASE,
ACTION_FAILED_MERGE,
ACTION_FAILED_REBASE,
))
ACTIONS_MERGE_FAILURE = set((
ACTION_FAILED_MERGE,
ACTION_FAILED_REBASE,
))
unit = models.ForeignKey('Unit', null=True)
subproject = models.ForeignKey('SubProject', null=True)
translation = models.ForeignKey('Translation', null=True)
dictionary = models.ForeignKey('Dictionary', null=True)
user = models.ForeignKey(User, null=True)
author = models.ForeignKey(User, null=True, related_name='author_set')
timestamp = models.DateTimeField(auto_now_add=True, db_index=True)
action = models.IntegerField(
choices=ACTION_CHOICES,
default=ACTION_CHANGE
)
target = models.TextField(default='', blank=True)
objects = ChangeManager()
class Meta(object):
ordering = ['-timestamp']
app_label = 'trans'
def __unicode__(self):
return _('%(action)s at %(time)s on %(translation)s by %(user)s') % {
'action': self.get_action_display(),
'time': self.timestamp,
'translation': self.translation,
'user': self.get_user_display(False),
}
def is_merge_failure(self):
return self.action in self.ACTIONS_MERGE_FAILURE
def get_user_display(self, icon=True):
return get_user_display(self.user, icon, link=True)
def get_absolute_url(self):
'''
Returns link either to unit or translation.
'''
if self.unit is not None:
return self.unit.get_absolute_url()
return self.get_translation_url()
def get_translation_url(self):
'''
Returns URL for translation.
'''
if self.translation is not None:
return self.translation.get_absolute_url()
elif self.subproject is not None:
return self.subproject.get_absolute_url()
elif self.dictionary is not None:
return self.dictionary.get_parent_url()
return None
def get_translation_display(self):
'''
Returns display name for translation.
'''
if self.translation is not None:
return unicode(self.translation)
elif self.subproject is not None:
return unicode(self.subproject)
elif self.dictionary is not None:
return '%s/%s' % (
self.dictionary.project,
self.dictionary.language
)
return None
def can_revert(self):
return (
self.unit is not None and
self.target and
self.action in self.ACTIONS_REVERTABLE
)
def save(self, *args, **kwargs):
if self.unit:
self.translation = self.unit.translation
if self.translation:
self.subproject = self.translation.subproject
super(Change, self).save(*args, **kwargs)
|
40223114/w16b_test | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/fractions.py | 722 | # Originally contributed by Sjoerd Mullender.
# Significantly modified by Jeffrey Yasskin <jyasskin at gmail.com>.
"""Fraction, infinite-precision, real numbers."""
from decimal import Decimal
import math
import numbers
import operator
import re
import sys
__all__ = ['Fraction', 'gcd']
def gcd(a, b):
"""Calculate the Greatest Common Divisor of a and b.
Unless b==0, the result will have the same sign as b (so that when
b is divided by it, the result comes out positive).
"""
while b:
a, b = b, a%b
return a
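# Sign examples for the rule above (easily checked by hand):
#   gcd(4, 6) == 2,  gcd(4, -6) == -2,  gcd(-4, 6) == 2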
# Constants related to the hash implementation; hash(x) is based
# on the reduction of x modulo the prime _PyHASH_MODULUS.
_PyHASH_MODULUS = sys.hash_info.modulus
# Value to be used for rationals that reduce to infinity modulo
# _PyHASH_MODULUS.
_PyHASH_INF = sys.hash_info.inf
_RATIONAL_FORMAT = re.compile(r"""
\A\s* # optional whitespace at the start, then
(?P<sign>[-+]?) # an optional sign, then
(?=\d|\.\d) # lookahead for digit or .digit
(?P<num>\d*) # numerator (possibly empty)
(?: # followed by
(?:/(?P<denom>\d+))? # an optional denominator
| # or
(?:\.(?P<decimal>\d*))? # an optional fractional part
(?:E(?P<exp>[-+]?\d+))? # and optional exponent
)
\s*\Z # and optional whitespace to finish
""", re.VERBOSE | re.IGNORECASE)
class Fraction(numbers.Rational):
"""This class implements rational numbers.
In the two-argument form of the constructor, Fraction(8, 6) will
produce a rational number equivalent to 4/3. Both arguments must
be Rational. The numerator defaults to 0 and the denominator
defaults to 1 so that Fraction(3) == 3 and Fraction() == 0.
Fractions can also be constructed from:
- numeric strings similar to those accepted by the
float constructor (for example, '-2.3' or '1e10')
- strings of the form '123/456'
- float and Decimal instances
- other Rational instances (including integers)
"""
__slots__ = ('_numerator', '_denominator')
# We're immutable, so use __new__ not __init__
def __new__(cls, numerator=0, denominator=None):
"""Constructs a Rational.
Takes a string like '3/2' or '1.5', another Rational instance, a
numerator/denominator pair, or a float.
Examples
--------
>>> Fraction(10, -8)
Fraction(-5, 4)
>>> Fraction(Fraction(1, 7), 5)
Fraction(1, 35)
>>> Fraction(Fraction(1, 7), Fraction(2, 3))
Fraction(3, 14)
>>> Fraction('314')
Fraction(314, 1)
>>> Fraction('-35/4')
Fraction(-35, 4)
>>> Fraction('3.1415') # conversion from numeric string
Fraction(6283, 2000)
>>> Fraction('-47e-2') # string may include a decimal exponent
Fraction(-47, 100)
>>> Fraction(1.47) # direct construction from float (exact conversion)
Fraction(6620291452234629, 4503599627370496)
>>> Fraction(2.25)
Fraction(9, 4)
>>> Fraction(Decimal('1.47'))
Fraction(147, 100)
"""
self = super(Fraction, cls).__new__(cls)
if denominator is None:
if isinstance(numerator, numbers.Rational):
self._numerator = numerator.numerator
self._denominator = numerator.denominator
return self
elif isinstance(numerator, float):
# Exact conversion from float
value = Fraction.from_float(numerator)
self._numerator = value._numerator
self._denominator = value._denominator
return self
elif isinstance(numerator, Decimal):
value = Fraction.from_decimal(numerator)
self._numerator = value._numerator
self._denominator = value._denominator
return self
elif isinstance(numerator, str):
# Handle construction from strings.
m = _RATIONAL_FORMAT.match(numerator)
if m is None:
raise ValueError('Invalid literal for Fraction: %r' %
numerator)
numerator = int(m.group('num') or '0')
denom = m.group('denom')
if denom:
denominator = int(denom)
else:
denominator = 1
decimal = m.group('decimal')
if decimal:
scale = 10**len(decimal)
numerator = numerator * scale + int(decimal)
denominator *= scale
exp = m.group('exp')
if exp:
exp = int(exp)
if exp >= 0:
numerator *= 10**exp
else:
denominator *= 10**-exp
if m.group('sign') == '-':
numerator = -numerator
else:
raise TypeError("argument should be a string "
"or a Rational instance")
elif (isinstance(numerator, numbers.Rational) and
isinstance(denominator, numbers.Rational)):
numerator, denominator = (
numerator.numerator * denominator.denominator,
denominator.numerator * numerator.denominator
)
else:
raise TypeError("both arguments should be "
"Rational instances")
if denominator == 0:
raise ZeroDivisionError('Fraction(%s, 0)' % numerator)
g = gcd(numerator, denominator)
self._numerator = numerator // g
self._denominator = denominator // g
return self
@classmethod
def from_float(cls, f):
"""Converts a finite float to a rational number, exactly.
Beware that Fraction.from_float(0.3) != Fraction(3, 10).
"""
if isinstance(f, numbers.Integral):
return cls(f)
elif not isinstance(f, float):
raise TypeError("%s.from_float() only takes floats, not %r (%s)" %
(cls.__name__, f, type(f).__name__))
if math.isnan(f):
raise ValueError("Cannot convert %r to %s." % (f, cls.__name__))
if math.isinf(f):
raise OverflowError("Cannot convert %r to %s." % (f, cls.__name__))
return cls(*f.as_integer_ratio())
@classmethod
def from_decimal(cls, dec):
"""Converts a finite Decimal instance to a rational number, exactly."""
from decimal import Decimal
if isinstance(dec, numbers.Integral):
dec = Decimal(int(dec))
elif not isinstance(dec, Decimal):
raise TypeError(
"%s.from_decimal() only takes Decimals, not %r (%s)" %
(cls.__name__, dec, type(dec).__name__))
if dec.is_infinite():
raise OverflowError(
"Cannot convert %s to %s." % (dec, cls.__name__))
if dec.is_nan():
raise ValueError("Cannot convert %s to %s." % (dec, cls.__name__))
sign, digits, exp = dec.as_tuple()
digits = int(''.join(map(str, digits)))
if sign:
digits = -digits
if exp >= 0:
return cls(digits * 10 ** exp)
else:
return cls(digits, 10 ** -exp)
def limit_denominator(self, max_denominator=1000000):
"""Closest Fraction to self with denominator at most max_denominator.
>>> Fraction('3.141592653589793').limit_denominator(10)
Fraction(22, 7)
>>> Fraction('3.141592653589793').limit_denominator(100)
Fraction(311, 99)
>>> Fraction(4321, 8765).limit_denominator(10000)
Fraction(4321, 8765)
"""
# Algorithm notes: For any real number x, define a *best upper
# approximation* to x to be a rational number p/q such that:
#
# (1) p/q >= x, and
# (2) if p/q > r/s >= x then s > q, for any rational r/s.
#
# Define *best lower approximation* similarly. Then it can be
# proved that a rational number is a best upper or lower
# approximation to x if, and only if, it is a convergent or
# semiconvergent of the (unique shortest) continued fraction
# associated to x.
#
# To find a best rational approximation with denominator <= M,
# we find the best upper and lower approximations with
# denominator <= M and take whichever of these is closer to x.
# In the event of a tie, the bound with smaller denominator is
# chosen. If both denominators are equal (which can happen
# only when max_denominator == 1 and self is midway between
# two integers) the lower bound---i.e., the floor of self, is
# taken.
if max_denominator < 1:
raise ValueError("max_denominator should be at least 1")
if self._denominator <= max_denominator:
return Fraction(self)
p0, q0, p1, q1 = 0, 1, 1, 0
n, d = self._numerator, self._denominator
while True:
a = n//d
q2 = q0+a*q1
if q2 > max_denominator:
break
p0, q0, p1, q1 = p1, q1, p0+a*p1, q2
n, d = d, n-a*d
k = (max_denominator-q0)//q1
bound1 = Fraction(p0+k*p1, q0+k*q1)
bound2 = Fraction(p1, q1)
if abs(bound2 - self) <= abs(bound1-self):
return bound2
else:
return bound1
@property
def numerator(a):
return a._numerator
@property
def denominator(a):
return a._denominator
def __repr__(self):
"""repr(self)"""
return ('Fraction(%s, %s)' % (self._numerator, self._denominator))
def __str__(self):
"""str(self)"""
if self._denominator == 1:
return str(self._numerator)
else:
return '%s/%s' % (self._numerator, self._denominator)
def _operator_fallbacks(monomorphic_operator, fallback_operator):
"""Generates forward and reverse operators given a purely-rational
operator and a function from the operator module.
Use this like:
__op__, __rop__ = _operator_fallbacks(just_rational_op, operator.op)
In general, we want to implement the arithmetic operations so
that mixed-mode operations either call an implementation whose
author knew about the types of both arguments, or convert both
to the nearest built in type and do the operation there. In
Fraction, that means that we define __add__ and __radd__ as:
def __add__(self, other):
# Both types have numerators/denominator attributes,
# so do the operation directly
if isinstance(other, (int, Fraction)):
return Fraction(self.numerator * other.denominator +
other.numerator * self.denominator,
self.denominator * other.denominator)
# float and complex don't have those operations, but we
# know about those types, so special case them.
elif isinstance(other, float):
return float(self) + other
elif isinstance(other, complex):
return complex(self) + other
# Let the other type take over.
return NotImplemented
def __radd__(self, other):
# radd handles more types than add because there's
# nothing left to fall back to.
if isinstance(other, numbers.Rational):
return Fraction(self.numerator * other.denominator +
other.numerator * self.denominator,
self.denominator * other.denominator)
elif isinstance(other, Real):
return float(other) + float(self)
elif isinstance(other, Complex):
return complex(other) + complex(self)
return NotImplemented
There are 5 different cases for a mixed-type addition on
Fraction. I'll refer to all of the above code that doesn't
refer to Fraction, float, or complex as "boilerplate". 'r'
will be an instance of Fraction, which is a subtype of
Rational (r : Fraction <: Rational), and b : B <:
Complex. The first three involve 'r + b':
1. If B <: Fraction, int, float, or complex, we handle
that specially, and all is well.
2. If Fraction falls back to the boilerplate code, and it
were to return a value from __add__, we'd miss the
possibility that B defines a more intelligent __radd__,
so the boilerplate should return NotImplemented from
__add__. In particular, we don't handle Rational
here, even though we could get an exact answer, in case
the other type wants to do something special.
3. If B <: Fraction, Python tries B.__radd__ before
Fraction.__add__. This is ok, because it was
implemented with knowledge of Fraction, so it can
handle those instances before delegating to Real or
Complex.
The next two situations describe 'b + r'. We assume that b
didn't know about Fraction in its implementation, and that it
uses similar boilerplate code:
4. If B <: Rational, then __radd_ converts both to the
builtin rational type (hey look, that's us) and
proceeds.
5. Otherwise, __radd__ tries to find the nearest common
base ABC, and fall back to its builtin type. Since this
class doesn't subclass a concrete type, there's no
implementation to fall back to, so we need to try as
hard as possible to return an actual value, or the user
will get a TypeError.
"""
def forward(a, b):
if isinstance(b, (int, Fraction)):
return monomorphic_operator(a, b)
elif isinstance(b, float):
return fallback_operator(float(a), b)
elif isinstance(b, complex):
return fallback_operator(complex(a), b)
else:
return NotImplemented
forward.__name__ = '__' + fallback_operator.__name__ + '__'
forward.__doc__ = monomorphic_operator.__doc__
def reverse(b, a):
if isinstance(a, numbers.Rational):
# Includes ints.
return monomorphic_operator(a, b)
elif isinstance(a, numbers.Real):
return fallback_operator(float(a), float(b))
elif isinstance(a, numbers.Complex):
return fallback_operator(complex(a), complex(b))
else:
return NotImplemented
reverse.__name__ = '__r' + fallback_operator.__name__ + '__'
reverse.__doc__ = monomorphic_operator.__doc__
return forward, reverse
def _add(a, b):
"""a + b"""
return Fraction(a.numerator * b.denominator +
b.numerator * a.denominator,
a.denominator * b.denominator)
__add__, __radd__ = _operator_fallbacks(_add, operator.add)
def _sub(a, b):
"""a - b"""
return Fraction(a.numerator * b.denominator -
b.numerator * a.denominator,
a.denominator * b.denominator)
__sub__, __rsub__ = _operator_fallbacks(_sub, operator.sub)
def _mul(a, b):
"""a * b"""
return Fraction(a.numerator * b.numerator, a.denominator * b.denominator)
__mul__, __rmul__ = _operator_fallbacks(_mul, operator.mul)
def _div(a, b):
"""a / b"""
return Fraction(a.numerator * b.denominator,
a.denominator * b.numerator)
__truediv__, __rtruediv__ = _operator_fallbacks(_div, operator.truediv)
def __floordiv__(a, b):
"""a // b"""
return math.floor(a / b)
def __rfloordiv__(b, a):
"""a // b"""
return math.floor(a / b)
def __mod__(a, b):
"""a % b"""
div = a // b
return a - b * div
def __rmod__(b, a):
"""a % b"""
div = a // b
return a - b * div
def __pow__(a, b):
"""a ** b
If b is not an integer, the result will be a float or complex
since roots are generally irrational. If b is an integer, the
result will be rational.
"""
if isinstance(b, numbers.Rational):
if b.denominator == 1:
power = b.numerator
if power >= 0:
return Fraction(a._numerator ** power,
a._denominator ** power)
else:
return Fraction(a._denominator ** -power,
a._numerator ** -power)
else:
# A fractional power will generally produce an
# irrational number.
return float(a) ** float(b)
else:
return float(a) ** b
def __rpow__(b, a):
"""a ** b"""
if b._denominator == 1 and b._numerator >= 0:
# If a is an int, keep it that way if possible.
return a ** b._numerator
if isinstance(a, numbers.Rational):
return Fraction(a.numerator, a.denominator) ** b
if b._denominator == 1:
return a ** b._numerator
return a ** float(b)
def __pos__(a):
"""+a: Coerces a subclass instance to Fraction"""
return Fraction(a._numerator, a._denominator)
def __neg__(a):
"""-a"""
return Fraction(-a._numerator, a._denominator)
def __abs__(a):
"""abs(a)"""
return Fraction(abs(a._numerator), a._denominator)
def __trunc__(a):
"""trunc(a)"""
if a._numerator < 0:
return -(-a._numerator // a._denominator)
else:
return a._numerator // a._denominator
def __floor__(a):
"""Will be math.floor(a) in 3.0."""
return a.numerator // a.denominator
def __ceil__(a):
"""Will be math.ceil(a) in 3.0."""
# The negations cleverly convince floordiv to return the ceiling.
return -(-a.numerator // a.denominator)
def __round__(self, ndigits=None):
"""Will be round(self, ndigits) in 3.0.
Rounds half toward even.
"""
if ndigits is None:
floor, remainder = divmod(self.numerator, self.denominator)
if remainder * 2 < self.denominator:
return floor
elif remainder * 2 > self.denominator:
return floor + 1
# Deal with the half case:
elif floor % 2 == 0:
return floor
else:
return floor + 1
shift = 10**abs(ndigits)
# See _operator_fallbacks.forward to check that the results of
# these operations will always be Fraction and therefore have
# round().
if ndigits > 0:
return Fraction(round(self * shift), shift)
else:
return Fraction(round(self / shift) * shift)
def __hash__(self):
"""hash(self)"""
# XXX since this method is expensive, consider caching the result
# In order to make sure that the hash of a Fraction agrees
# with the hash of a numerically equal integer, float or
# Decimal instance, we follow the rules for numeric hashes
# outlined in the documentation. (See library docs, 'Built-in
# Types').
# dinv is the inverse of self._denominator modulo the prime
# _PyHASH_MODULUS, or 0 if self._denominator is divisible by
# _PyHASH_MODULUS.
dinv = pow(self._denominator, _PyHASH_MODULUS - 2, _PyHASH_MODULUS)
if not dinv:
hash_ = _PyHASH_INF
else:
hash_ = abs(self._numerator) * dinv % _PyHASH_MODULUS
result = hash_ if self >= 0 else -hash_
return -2 if result == -1 else result
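    # Consistency property implied by the rules above: numerically equal
    # values hash alike across numeric types, e.g.
    #   hash(Fraction(1, 2)) == hash(0.5) and hash(Fraction(3, 1)) == hash(3)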
def __eq__(a, b):
"""a == b"""
if isinstance(b, numbers.Rational):
return (a._numerator == b.numerator and
a._denominator == b.denominator)
if isinstance(b, numbers.Complex) and b.imag == 0:
b = b.real
if isinstance(b, float):
if math.isnan(b) or math.isinf(b):
# comparisons with an infinity or nan should behave in
# the same way for any finite a, so treat a as zero.
return 0.0 == b
else:
return a == a.from_float(b)
else:
# Since a doesn't know how to compare with b, let's give b
# a chance to compare itself with a.
return NotImplemented
def _richcmp(self, other, op):
"""Helper for comparison operators, for internal use only.
Implement comparison between a Rational instance `self`, and
either another Rational instance or a float `other`. If
`other` is not a Rational instance or a float, return
NotImplemented. `op` should be one of the six standard
comparison operators.
"""
# convert other to a Rational instance where reasonable.
if isinstance(other, numbers.Rational):
return op(self._numerator * other.denominator,
self._denominator * other.numerator)
if isinstance(other, float):
if math.isnan(other) or math.isinf(other):
return op(0.0, other)
else:
return op(self, self.from_float(other))
else:
return NotImplemented
def __lt__(a, b):
"""a < b"""
return a._richcmp(b, operator.lt)
def __gt__(a, b):
"""a > b"""
return a._richcmp(b, operator.gt)
def __le__(a, b):
"""a <= b"""
return a._richcmp(b, operator.le)
def __ge__(a, b):
"""a >= b"""
return a._richcmp(b, operator.ge)
def __bool__(a):
"""a != 0"""
return a._numerator != 0
# support for pickling, copy, and deepcopy
def __reduce__(self):
return (self.__class__, (str(self),))
def __copy__(self):
if type(self) == Fraction:
return self # I'm immutable; therefore I am my own clone
return self.__class__(self._numerator, self._denominator)
def __deepcopy__(self, memo):
if type(self) == Fraction:
return self # My components are also immutable
return self.__class__(self._numerator, self._denominator)
|
ykaneko/quantum | refs/heads/master | quantum/tests/unit/test_linux_ip_lib.py | 3 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from quantum.agent.linux import ip_lib
from quantum.common import exceptions
from quantum.tests import base
NETNS_SAMPLE = [
'12345678-1234-5678-abcd-1234567890ab',
'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb',
'cccccccc-cccc-cccc-cccc-cccccccccccc']
LINK_SAMPLE = [
'1: lo: <LOOPBACK,UP,LOWER_UP> mtu 16436 qdisc noqueue state UNKNOWN \\'
'link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00',
'2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP '
'qlen 1000\ link/ether cc:dd:ee:ff:ab:cd brd ff:ff:ff:ff:ff:ff'
'\ alias openvswitch',
'3: br-int: <BROADCAST,MULTICAST> mtu 1500 qdisc noop state DOWN '
'\ link/ether aa:bb:cc:dd:ee:ff brd ff:ff:ff:ff:ff:ff',
'4: gw-ddc717df-49: <BROADCAST,MULTICAST> mtu 1500 qdisc noop '
'state DOWN \ link/ether fe:dc:ba:fe:dc:ba brd ff:ff:ff:ff:ff:ff']
ADDR_SAMPLE = ("""
2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP qlen 1000
link/ether dd:cc:aa:b9:76:ce brd ff:ff:ff:ff:ff:ff
inet 172.16.77.240/24 brd 172.16.77.255 scope global eth0
inet6 2001:470:9:1224:5595:dd51:6ba2:e788/64 scope global temporary dynamic
valid_lft 14187sec preferred_lft 3387sec
inet6 2001:470:9:1224:fd91:272:581e:3a32/64 scope global temporary """
"""deprecated dynamic
valid_lft 14187sec preferred_lft 0sec
inet6 2001:470:9:1224:4508:b885:5fb:740b/64 scope global temporary """
"""deprecated dynamic
valid_lft 14187sec preferred_lft 0sec
inet6 2001:470:9:1224:dfcc:aaff:feb9:76ce/64 scope global dynamic
valid_lft 14187sec preferred_lft 3387sec
inet6 fe80::dfcc:aaff:feb9:76ce/64 scope link
valid_lft forever preferred_lft forever
""")
ADDR_SAMPLE2 = ("""
2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP qlen 1000
link/ether dd:cc:aa:b9:76:ce brd ff:ff:ff:ff:ff:ff
inet 172.16.77.240/24 scope global eth0
inet6 2001:470:9:1224:5595:dd51:6ba2:e788/64 scope global temporary dynamic
valid_lft 14187sec preferred_lft 3387sec
inet6 2001:470:9:1224:fd91:272:581e:3a32/64 scope global temporary """
"""deprecated dynamic
valid_lft 14187sec preferred_lft 0sec
inet6 2001:470:9:1224:4508:b885:5fb:740b/64 scope global temporary """
"""deprecated dynamic
valid_lft 14187sec preferred_lft 0sec
inet6 2001:470:9:1224:dfcc:aaff:feb9:76ce/64 scope global dynamic
valid_lft 14187sec preferred_lft 3387sec
inet6 fe80::dfcc:aaff:feb9:76ce/64 scope link
valid_lft forever preferred_lft forever
""")
GATEWAY_SAMPLE1 = ("""
default via 10.35.19.254 metric 100
10.35.16.0/22 proto kernel scope link src 10.35.17.97
""")
GATEWAY_SAMPLE2 = ("""
default via 10.35.19.254 metric 100
""")
GATEWAY_SAMPLE3 = ("""
10.35.16.0/22 proto kernel scope link src 10.35.17.97
""")
GATEWAY_SAMPLE4 = ("""
default via 10.35.19.254
""")
DEVICE_ROUTE_SAMPLE = ("10.0.0.0/24 scope link src 10.0.0.2")
SUBNET_SAMPLE1 = ("10.0.0.0/24 dev qr-23380d11-d2 scope link src 10.0.0.1\n"
"10.0.0.0/24 dev tap1d7888a7-10 scope link src 10.0.0.2")
SUBNET_SAMPLE2 = ("10.0.0.0/24 dev tap1d7888a7-10 scope link src 10.0.0.2\n"
"10.0.0.0/24 dev qr-23380d11-d2 scope link src 10.0.0.1")
class TestSubProcessBase(base.BaseTestCase):
def setUp(self):
super(TestSubProcessBase, self).setUp()
self.execute_p = mock.patch('quantum.agent.linux.utils.execute')
self.execute = self.execute_p.start()
self.addCleanup(self.execute_p.stop)
def test_execute_wrapper(self):
ip_lib.SubProcessBase._execute('o', 'link', ('list',), 'sudo')
self.execute.assert_called_once_with(['ip', '-o', 'link', 'list'],
root_helper='sudo')
def test_execute_wrapper_int_options(self):
ip_lib.SubProcessBase._execute([4], 'link', ('list',))
self.execute.assert_called_once_with(['ip', '-4', 'link', 'list'],
root_helper=None)
def test_execute_wrapper_no_options(self):
ip_lib.SubProcessBase._execute([], 'link', ('list',))
self.execute.assert_called_once_with(['ip', 'link', 'list'],
root_helper=None)
def test_run_no_namespace(self):
base = ip_lib.SubProcessBase('sudo')
base._run([], 'link', ('list',))
self.execute.assert_called_once_with(['ip', 'link', 'list'],
root_helper=None)
def test_run_namespace(self):
base = ip_lib.SubProcessBase('sudo', 'ns')
base._run([], 'link', ('list',))
self.execute.assert_called_once_with(['ip', 'netns', 'exec', 'ns',
'ip', 'link', 'list'],
root_helper='sudo')
def test_as_root_namespace(self):
base = ip_lib.SubProcessBase('sudo', 'ns')
base._as_root([], 'link', ('list',))
self.execute.assert_called_once_with(['ip', 'netns', 'exec', 'ns',
'ip', 'link', 'list'],
root_helper='sudo')
def test_as_root_no_root_helper(self):
base = ip_lib.SubProcessBase()
self.assertRaises(exceptions.SudoRequired,
base._as_root,
[], 'link', ('list',))
class TestIpWrapper(base.BaseTestCase):
def setUp(self):
super(TestIpWrapper, self).setUp()
self.execute_p = mock.patch.object(ip_lib.IPWrapper, '_execute')
self.execute = self.execute_p.start()
self.addCleanup(self.execute_p.stop)
def test_get_devices(self):
self.execute.return_value = '\n'.join(LINK_SAMPLE)
retval = ip_lib.IPWrapper('sudo').get_devices()
self.assertEqual(retval,
[ip_lib.IPDevice('lo'),
ip_lib.IPDevice('eth0'),
ip_lib.IPDevice('br-int'),
ip_lib.IPDevice('gw-ddc717df-49')])
self.execute.assert_called_once_with('o', 'link', ('list',),
'sudo', None)
def test_get_devices_malformed_line(self):
self.execute.return_value = '\n'.join(LINK_SAMPLE + ['gibberish'])
retval = ip_lib.IPWrapper('sudo').get_devices()
self.assertEqual(retval,
[ip_lib.IPDevice('lo'),
ip_lib.IPDevice('eth0'),
ip_lib.IPDevice('br-int'),
ip_lib.IPDevice('gw-ddc717df-49')])
self.execute.assert_called_once_with('o', 'link', ('list',),
'sudo', None)
def test_get_namespaces(self):
self.execute.return_value = '\n'.join(NETNS_SAMPLE)
retval = ip_lib.IPWrapper.get_namespaces('sudo')
self.assertEqual(retval,
['12345678-1234-5678-abcd-1234567890ab',
'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb',
'cccccccc-cccc-cccc-cccc-cccccccccccc'])
self.execute.assert_called_once_with('', 'netns', ('list',),
root_helper='sudo')
def test_add_tuntap(self):
ip_lib.IPWrapper('sudo').add_tuntap('tap0')
self.execute.assert_called_once_with('', 'tuntap',
('add', 'tap0', 'mode', 'tap'),
'sudo', None)
def test_add_veth(self):
ip_lib.IPWrapper('sudo').add_veth('tap0', 'tap1')
self.execute.assert_called_once_with('', 'link',
('add', 'tap0', 'type', 'veth',
'peer', 'name', 'tap1'),
'sudo', None)
def test_add_veth_with_namespaces(self):
ns2 = 'ns2'
with mock.patch.object(ip_lib.IPWrapper, 'ensure_namespace') as en:
ip_lib.IPWrapper('sudo').add_veth('tap0', 'tap1', namespace2=ns2)
en.assert_has_calls([mock.call(ns2)])
self.execute.assert_called_once_with('', 'link',
('add', 'tap0', 'type', 'veth',
'peer', 'name', 'tap1',
'netns', ns2),
'sudo', None)
def test_get_device(self):
dev = ip_lib.IPWrapper('sudo', 'ns').device('eth0')
self.assertEqual(dev.root_helper, 'sudo')
self.assertEqual(dev.namespace, 'ns')
self.assertEqual(dev.name, 'eth0')
def test_ensure_namespace(self):
with mock.patch.object(ip_lib, 'IPDevice') as ip_dev:
ip = ip_lib.IPWrapper('sudo')
with mock.patch.object(ip.netns, 'exists') as ns_exists:
ns_exists.return_value = False
ip.ensure_namespace('ns')
self.execute.assert_has_calls(
[mock.call([], 'netns', ('add', 'ns'), 'sudo', None)])
ip_dev.assert_has_calls([mock.call('lo', 'sudo', 'ns'),
mock.call().link.set_up()])
def test_ensure_namespace_existing(self):
with mock.patch.object(ip_lib, 'IpNetnsCommand') as ip_ns_cmd:
ip_ns_cmd.exists.return_value = True
ns = ip_lib.IPWrapper('sudo').ensure_namespace('ns')
self.assertFalse(self.execute.called)
self.assertEqual(ns.namespace, 'ns')
def test_namespace_is_empty_no_devices(self):
ip = ip_lib.IPWrapper('sudo', 'ns')
with mock.patch.object(ip, 'get_devices') as get_devices:
get_devices.return_value = []
self.assertTrue(ip.namespace_is_empty())
get_devices.assert_called_once_with(exclude_loopback=True)
def test_namespace_is_empty(self):
ip = ip_lib.IPWrapper('sudo', 'ns')
with mock.patch.object(ip, 'get_devices') as get_devices:
get_devices.return_value = [mock.Mock()]
self.assertFalse(ip.namespace_is_empty())
get_devices.assert_called_once_with(exclude_loopback=True)
def test_garbage_collect_namespace_does_not_exist(self):
with mock.patch.object(ip_lib, 'IpNetnsCommand') as ip_ns_cmd_cls:
ip_ns_cmd_cls.return_value.exists.return_value = False
ip = ip_lib.IPWrapper('sudo', 'ns')
with mock.patch.object(ip, 'namespace_is_empty') as mock_is_empty:
self.assertFalse(ip.garbage_collect_namespace())
ip_ns_cmd_cls.assert_has_calls([mock.call().exists('ns')])
self.assertNotIn(mock.call().delete('ns'),
ip_ns_cmd_cls.return_value.mock_calls)
self.assertEqual(mock_is_empty.mock_calls, [])
def test_garbage_collect_namespace_existing_empty_ns(self):
with mock.patch.object(ip_lib, 'IpNetnsCommand') as ip_ns_cmd_cls:
ip_ns_cmd_cls.return_value.exists.return_value = True
ip = ip_lib.IPWrapper('sudo', 'ns')
with mock.patch.object(ip, 'namespace_is_empty') as mock_is_empty:
mock_is_empty.return_value = True
self.assertTrue(ip.garbage_collect_namespace())
mock_is_empty.assert_called_once_with()
expected = [mock.call().exists('ns'),
mock.call().delete('ns')]
ip_ns_cmd_cls.assert_has_calls(expected)
def test_garbage_collect_namespace_existing_not_empty(self):
lo_device = mock.Mock()
lo_device.name = 'lo'
tap_device = mock.Mock()
tap_device.name = 'tap1'
with mock.patch.object(ip_lib, 'IpNetnsCommand') as ip_ns_cmd_cls:
ip_ns_cmd_cls.return_value.exists.return_value = True
ip = ip_lib.IPWrapper('sudo', 'ns')
with mock.patch.object(ip, 'namespace_is_empty') as mock_is_empty:
mock_is_empty.return_value = False
self.assertFalse(ip.garbage_collect_namespace())
mock_is_empty.assert_called_once_with()
expected = [mock.call(ip),
mock.call().exists('ns')]
self.assertEqual(ip_ns_cmd_cls.mock_calls, expected)
self.assertNotIn(mock.call().delete('ns'),
ip_ns_cmd_cls.mock_calls)
def test_add_device_to_namespace(self):
dev = mock.Mock()
ip_lib.IPWrapper('sudo', 'ns').add_device_to_namespace(dev)
dev.assert_has_calls([mock.call.link.set_netns('ns')])
def test_add_device_to_namespace_is_none(self):
dev = mock.Mock()
ip_lib.IPWrapper('sudo').add_device_to_namespace(dev)
self.assertEqual(dev.mock_calls, [])
class TestIPDevice(base.BaseTestCase):
def test_eq_same_name(self):
dev1 = ip_lib.IPDevice('tap0')
dev2 = ip_lib.IPDevice('tap0')
self.assertEqual(dev1, dev2)
def test_eq_diff_name(self):
dev1 = ip_lib.IPDevice('tap0')
dev2 = ip_lib.IPDevice('tap1')
self.assertNotEqual(dev1, dev2)
def test_eq_same_namespace(self):
dev1 = ip_lib.IPDevice('tap0', 'ns1')
dev2 = ip_lib.IPDevice('tap0', 'ns1')
self.assertEqual(dev1, dev2)
def test_eq_diff_namespace(self):
dev1 = ip_lib.IPDevice('tap0', 'sudo', 'ns1')
dev2 = ip_lib.IPDevice('tap0', 'sudo', 'ns2')
self.assertNotEqual(dev1, dev2)
def test_eq_other_is_none(self):
dev1 = ip_lib.IPDevice('tap0', 'sudo', 'ns1')
self.assertNotEqual(dev1, None)
def test_str(self):
self.assertEqual(str(ip_lib.IPDevice('tap0')), 'tap0')
class TestIPCommandBase(base.BaseTestCase):
def setUp(self):
super(TestIPCommandBase, self).setUp()
self.ip = mock.Mock()
self.ip.root_helper = 'sudo'
self.ip.namespace = 'namespace'
self.ip_cmd = ip_lib.IpCommandBase(self.ip)
self.ip_cmd.COMMAND = 'foo'
def test_run(self):
self.ip_cmd._run('link', 'show')
self.ip.assert_has_calls([mock.call._run([], 'foo', ('link', 'show'))])
def test_run_with_options(self):
self.ip_cmd._run('link', options='o')
self.ip.assert_has_calls([mock.call._run('o', 'foo', ('link', ))])
def test_as_root(self):
self.ip_cmd._as_root('link')
self.ip.assert_has_calls(
[mock.call._as_root([], 'foo', ('link', ), False)])
def test_as_root_with_options(self):
self.ip_cmd._as_root('link', options='o')
self.ip.assert_has_calls(
[mock.call._as_root('o', 'foo', ('link', ), False)])
class TestIPDeviceCommandBase(base.BaseTestCase):
def setUp(self):
super(TestIPDeviceCommandBase, self).setUp()
self.ip_dev = mock.Mock()
self.ip_dev.name = 'eth0'
self.ip_dev.root_helper = 'sudo'
self.ip_dev._execute = mock.Mock(return_value='executed')
self.ip_cmd = ip_lib.IpDeviceCommandBase(self.ip_dev)
self.ip_cmd.COMMAND = 'foo'
def test_name_property(self):
self.assertEqual(self.ip_cmd.name, 'eth0')
class TestIPCmdBase(base.BaseTestCase):
def setUp(self):
super(TestIPCmdBase, self).setUp()
self.parent = mock.Mock()
self.parent.name = 'eth0'
self.parent.root_helper = 'sudo'
def _assert_call(self, options, args):
self.parent.assert_has_calls([
mock.call._run(options, self.command, args)])
def _assert_sudo(self, options, args, force_root_namespace=False):
self.parent.assert_has_calls(
[mock.call._as_root(options, self.command, args,
force_root_namespace)])
class TestIpLinkCommand(TestIPCmdBase):
def setUp(self):
super(TestIpLinkCommand, self).setUp()
self.parent._run.return_value = LINK_SAMPLE[1]
self.command = 'link'
self.link_cmd = ip_lib.IpLinkCommand(self.parent)
def test_set_address(self):
self.link_cmd.set_address('aa:bb:cc:dd:ee:ff')
self._assert_sudo([], ('set', 'eth0', 'address', 'aa:bb:cc:dd:ee:ff'))
def test_set_mtu(self):
self.link_cmd.set_mtu(1500)
self._assert_sudo([], ('set', 'eth0', 'mtu', 1500))
def test_set_up(self):
self.link_cmd.set_up()
self._assert_sudo([], ('set', 'eth0', 'up'))
def test_set_down(self):
self.link_cmd.set_down()
self._assert_sudo([], ('set', 'eth0', 'down'))
def test_set_netns(self):
self.link_cmd.set_netns('foo')
self._assert_sudo([], ('set', 'eth0', 'netns', 'foo'))
self.assertEqual(self.parent.namespace, 'foo')
def test_set_name(self):
self.link_cmd.set_name('tap1')
self._assert_sudo([], ('set', 'eth0', 'name', 'tap1'))
self.assertEqual(self.parent.name, 'tap1')
def test_set_alias(self):
self.link_cmd.set_alias('openvswitch')
self._assert_sudo([], ('set', 'eth0', 'alias', 'openvswitch'))
def test_delete(self):
self.link_cmd.delete()
self._assert_sudo([], ('delete', 'eth0'))
def test_address_property(self):
self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1])
self.assertEqual(self.link_cmd.address, 'cc:dd:ee:ff:ab:cd')
def test_mtu_property(self):
self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1])
self.assertEqual(self.link_cmd.mtu, 1500)
def test_qdisc_property(self):
self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1])
self.assertEqual(self.link_cmd.qdisc, 'mq')
def test_qlen_property(self):
self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1])
self.assertEqual(self.link_cmd.qlen, 1000)
def test_alias_property(self):
self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1])
self.assertEqual(self.link_cmd.alias, 'openvswitch')
def test_state_property(self):
self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1])
self.assertEqual(self.link_cmd.state, 'UP')
def test_settings_property(self):
expected = {'mtu': 1500,
'qlen': 1000,
'state': 'UP',
'qdisc': 'mq',
'brd': 'ff:ff:ff:ff:ff:ff',
'link/ether': 'cc:dd:ee:ff:ab:cd',
'alias': 'openvswitch'}
self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1])
self.assertEqual(self.link_cmd.attributes, expected)
self._assert_call('o', ('show', 'eth0'))
class TestIpAddrCommand(TestIPCmdBase):
def setUp(self):
super(TestIpAddrCommand, self).setUp()
self.parent.name = 'tap0'
self.command = 'addr'
self.addr_cmd = ip_lib.IpAddrCommand(self.parent)
def test_add_address(self):
self.addr_cmd.add(4, '192.168.45.100/24', '192.168.45.255')
self._assert_sudo([4],
('add', '192.168.45.100/24', 'brd', '192.168.45.255',
'scope', 'global', 'dev', 'tap0'))
def test_add_address_scoped(self):
self.addr_cmd.add(4, '192.168.45.100/24', '192.168.45.255',
scope='link')
self._assert_sudo([4],
('add', '192.168.45.100/24', 'brd', '192.168.45.255',
'scope', 'link', 'dev', 'tap0'))
def test_del_address(self):
self.addr_cmd.delete(4, '192.168.45.100/24')
self._assert_sudo([4],
('del', '192.168.45.100/24', 'dev', 'tap0'))
def test_flush(self):
self.addr_cmd.flush()
self._assert_sudo([], ('flush', 'tap0'))
def test_list(self):
expected = [
dict(ip_version=4, scope='global',
dynamic=False, cidr='172.16.77.240/24',
broadcast='172.16.77.255'),
dict(ip_version=6, scope='global',
dynamic=True, cidr='2001:470:9:1224:5595:dd51:6ba2:e788/64',
broadcast='::'),
dict(ip_version=6, scope='global',
dynamic=True, cidr='2001:470:9:1224:fd91:272:581e:3a32/64',
broadcast='::'),
dict(ip_version=6, scope='global',
dynamic=True, cidr='2001:470:9:1224:4508:b885:5fb:740b/64',
broadcast='::'),
dict(ip_version=6, scope='global',
dynamic=True, cidr='2001:470:9:1224:dfcc:aaff:feb9:76ce/64',
broadcast='::'),
dict(ip_version=6, scope='link',
dynamic=False, cidr='fe80::dfcc:aaff:feb9:76ce/64',
broadcast='::')]
test_cases = [ADDR_SAMPLE, ADDR_SAMPLE2]
for test_case in test_cases:
self.parent._run = mock.Mock(return_value=test_case)
self.assertEqual(self.addr_cmd.list(), expected)
self._assert_call([], ('show', 'tap0'))
def test_list_filtered(self):
expected = [
dict(ip_version=4, scope='global',
dynamic=False, cidr='172.16.77.240/24',
broadcast='172.16.77.255')]
test_cases = [ADDR_SAMPLE, ADDR_SAMPLE2]
for test_case in test_cases:
output = '\n'.join(test_case.split('\n')[0:4])
self.parent._run.return_value = output
self.assertEqual(self.addr_cmd.list('global',
filters=['permanent']), expected)
self._assert_call([], ('show', 'tap0', 'permanent', 'scope',
'global'))
class TestIpRouteCommand(TestIPCmdBase):
def setUp(self):
super(TestIpRouteCommand, self).setUp()
self.parent.name = 'eth0'
self.command = 'route'
self.route_cmd = ip_lib.IpRouteCommand(self.parent)
def test_add_gateway(self):
gateway = '192.168.45.100'
metric = 100
self.route_cmd.add_gateway(gateway, metric)
self._assert_sudo([],
('replace', 'default', 'via', gateway,
'metric', metric,
'dev', self.parent.name))
def test_del_gateway(self):
gateway = '192.168.45.100'
self.route_cmd.delete_gateway(gateway)
self._assert_sudo([],
('del', 'default', 'via', gateway,
'dev', self.parent.name))
def test_get_gateway(self):
test_cases = [{'sample': GATEWAY_SAMPLE1,
'expected': {'gateway': '10.35.19.254',
'metric': 100}},
{'sample': GATEWAY_SAMPLE2,
'expected': {'gateway': '10.35.19.254',
'metric': 100}},
{'sample': GATEWAY_SAMPLE3,
'expected': None},
{'sample': GATEWAY_SAMPLE4,
'expected': {'gateway': '10.35.19.254'}}]
for test_case in test_cases:
self.parent._run = mock.Mock(return_value=test_case['sample'])
self.assertEqual(self.route_cmd.get_gateway(),
test_case['expected'])
def test_pullup_route(self):
# interface is not the first in the list - requires
# deleting and creating existing entries
output = [DEVICE_ROUTE_SAMPLE, SUBNET_SAMPLE1]
        def pullup_side_effect(*args):
result = output.pop(0)
return result
self.parent._run = mock.Mock(side_effect=pullup_side_effect)
self.route_cmd.pullup_route('tap1d7888a7-10')
self._assert_sudo([], ('del', '10.0.0.0/24', 'dev', 'qr-23380d11-d2'))
self._assert_sudo([], ('append', '10.0.0.0/24', 'proto', 'kernel',
'src', '10.0.0.1', 'dev', 'qr-23380d11-d2'))
def test_pullup_route_first(self):
# interface is first in the list - no changes
output = [DEVICE_ROUTE_SAMPLE, SUBNET_SAMPLE2]
        def pullup_side_effect(*args):
result = output.pop(0)
return result
self.parent._run = mock.Mock(side_effect=pullup_side_effect)
self.route_cmd.pullup_route('tap1d7888a7-10')
# Check two calls - device get and subnet get
self.assertEqual(len(self.parent._run.mock_calls), 2)
class TestIpNetnsCommand(TestIPCmdBase):
def setUp(self):
super(TestIpNetnsCommand, self).setUp()
self.command = 'netns'
self.netns_cmd = ip_lib.IpNetnsCommand(self.parent)
def test_add_namespace(self):
ns = self.netns_cmd.add('ns')
self._assert_sudo([], ('add', 'ns'), force_root_namespace=True)
self.assertEqual(ns.namespace, 'ns')
def test_delete_namespace(self):
with mock.patch('quantum.agent.linux.utils.execute'):
self.netns_cmd.delete('ns')
self._assert_sudo([], ('delete', 'ns'), force_root_namespace=True)
def test_namespace_exists(self):
retval = '\n'.join(NETNS_SAMPLE)
self.parent._as_root.return_value = retval
self.assertTrue(
self.netns_cmd.exists('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'))
self._assert_sudo('o', ('list',), force_root_namespace=True)
    def test_namespace_does_not_exist(self):
retval = '\n'.join(NETNS_SAMPLE)
self.parent._as_root.return_value = retval
self.assertFalse(
self.netns_cmd.exists('bbbbbbbb-1111-2222-3333-bbbbbbbbbbbb'))
self._assert_sudo('o', ('list',), force_root_namespace=True)
def test_execute(self):
self.parent.namespace = 'ns'
with mock.patch('quantum.agent.linux.utils.execute') as execute:
self.netns_cmd.execute(['ip', 'link', 'list'])
execute.assert_called_once_with(['ip', 'netns', 'exec', 'ns', 'ip',
'link', 'list'],
root_helper='sudo',
check_exit_code=True)
def test_execute_env_var_prepend(self):
self.parent.namespace = 'ns'
with mock.patch('quantum.agent.linux.utils.execute') as execute:
env = dict(FOO=1, BAR=2)
self.netns_cmd.execute(['ip', 'link', 'list'], env)
execute.assert_called_once_with(
['FOO=1', 'BAR=2', 'ip', 'netns', 'exec', 'ns', 'ip', 'link',
'list'],
root_helper='sudo', check_exit_code=True)
class TestDeviceExists(base.BaseTestCase):
def test_device_exists(self):
with mock.patch.object(ip_lib.IPDevice, '_execute') as _execute:
_execute.return_value = LINK_SAMPLE[1]
self.assertTrue(ip_lib.device_exists('eth0'))
_execute.assert_called_once_with('o', 'link', ('show', 'eth0'))
def test_device_does_not_exist(self):
with mock.patch.object(ip_lib.IPDevice, '_execute') as _execute:
_execute.return_value = ''
_execute.side_effect = RuntimeError
self.assertFalse(ip_lib.device_exists('eth0'))
|
waddedMeat/ember-proxy-example | refs/heads/master | test-app/node_modules/ember-cli/node_modules/npm/node_modules/node-gyp/gyp/tools/graphviz.py | 2679 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Using the JSON dumped by the dump-dependency-json generator,
generate input suitable for graphviz to render a dependency graph of
targets."""
import collections
import json
import sys
def ParseTarget(target):
target, _, suffix = target.partition('#')
filename, _, target = target.partition(':')
return filename, target, suffix
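# For illustration: ParseTarget('foo/bar.gyp:mylib#host') returns
# ('foo/bar.gyp', 'mylib', 'host'); the ':target' and '#suffix' pieces are
# optional and come back as empty strings when absent.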
def LoadEdges(filename, targets):
"""Load the edges map from the dump file, and filter it to only
  show targets in |targets| and their dependents."""
  file = open(filename)
edges = json.load(file)
file.close()
# Copy out only the edges we're interested in from the full edge list.
target_edges = {}
to_visit = targets[:]
while to_visit:
src = to_visit.pop()
if src in target_edges:
continue
target_edges[src] = edges[src]
to_visit.extend(edges[src])
return target_edges
def WriteGraph(edges):
"""Print a graphviz graph to stdout.
|edges| is a map of target to a list of other targets it depends on."""
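  # Shape of |edges| for illustration (target naming follows the dump.json
  # conventions, which are assumed here):
  #   {'a.gyp:x': ['a.gyp:y'], 'a.gyp:y': []}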
# Bucket targets by file.
files = collections.defaultdict(list)
for src, dst in edges.items():
build_file, target_name, toolset = ParseTarget(src)
files[build_file].append(src)
print 'digraph D {'
print ' fontsize=8' # Used by subgraphs.
print ' node [fontsize=8]'
# Output nodes by file. We must first write out each node within
# its file grouping before writing out any edges that may refer
# to those nodes.
for filename, targets in files.items():
if len(targets) == 1:
# If there's only one node for this file, simplify
# the display by making it a box without an internal node.
target = targets[0]
build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
target_name)
else:
# Group multiple nodes together in a subgraph.
print ' subgraph "cluster_%s" {' % filename
print ' label = "%s"' % filename
for target in targets:
build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [label="%s"]' % (target, target_name)
print ' }'
# Now that we've placed all the nodes within subgraphs, output all
# the edges between nodes.
for src, dsts in edges.items():
for dst in dsts:
print ' "%s" -> "%s"' % (src, dst)
print '}'
def main():
if len(sys.argv) < 2:
print >>sys.stderr, __doc__
print >>sys.stderr
print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0])
return 1
edges = LoadEdges('dump.json', sys.argv[1:])
WriteGraph(edges)
return 0
if __name__ == '__main__':
sys.exit(main())
|
avanov/Rhetoric | refs/heads/develop | tests/testapp/manage.py | 57 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testapp.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
waynesun09/tp-libvirt | refs/heads/master | libvirt/tests/src/virsh_cmd/domain/virsh_metadata.py | 4 | import logging
import xml.dom.minidom
import aexpect
from autotest.client.shared import error
from virttest import remote
from virttest import virsh
from virttest.utils_test import libvirt as utlv
from virttest.utils_libvirtd import Libvirtd
from virttest.libvirt_xml import vm_xml
def run(test, params, env):
"""
Test the command virsh metadata
Run in 4 steps:
1. Set domain metadata
2. Get domain metadata
3. Restart libvirtd then get domain metadata again
4. Remove domain metadata then get domain metadata again
"""
vm_name = params.get("main_vm")
vm = env.get_vm(vm_name)
metadata_uri = params.get("metadata_uri")
metadata_key = params.get("metadata_key")
metadata_value = params.get("metadata_value", "")
metadata_option = params.get("metadata_option", "")
virsh_dargs = {'debug': True, 'ignore_status': True}
metadata_set = "yes" == params.get("metadata_set", "no")
metadata_get = "yes" == params.get("metadata_get", "yes")
metadata_remove = "yes" == params.get("metadata_remove", "no")
restart_libvirtd = "yes" == params.get("restart_libvirtd", "no")
status_error = "yes" == params.get("status_error", "no")
if not metadata_uri:
raise error.TestErrorr("'uri' is needed")
vmxml = vm_xml.VMXML.new_from_dumpxml(vm_name)
# Start VM
if vm.state() != "running":
vm.destroy()
vm.start()
def pretty_xml(xml_str):
return xml.dom.minidom.parseString(xml_str).toprettyxml()
def check_result(result, expect_status, expect_output=None):
"""
Check virsh metadata command
"""
utlv.check_exit_status(result, expect_status)
if result.exit_status == 0 and expect_output:
expect_output = pretty_xml(expect_output)
logging.debug("Expect metadata: %s", expect_output)
output = result.stdout.strip()
output = pretty_xml(output)
logging.debug("Command get metadata: %s", output)
if output != expect_output:
raise error.TestFail("Metadat is not expected")
def get_metadata(metadata_option=""):
"""
Get domain metadata
"""
option = metadata_option.replace("--edit", "")
result = virsh.metadata(vm_name,
metadata_uri,
options=option,
key=metadata_key,
**virsh_dargs)
return result
try:
# Set metadata XML
if metadata_set:
if not metadata_key:
raise error.TestErrorr("'key' is needed")
if not metadata_value:
raise error.TestErrorr("New metadata is needed")
# Parse metadata value
if "--edit" in metadata_option:
virsh_cmd = r"virsh metadata %s --uri %s --key %s %s"
virsh_cmd = virsh_cmd % (vm_name, metadata_uri,
metadata_key, metadata_option)
session = aexpect.ShellSession("sudo -s")
logging.info("Running command: %s", virsh_cmd)
try:
session.sendline(virsh_cmd)
session.sendline(r":insert")
session.sendline(metadata_value)
session.sendline(".")
session.send('ZZ')
remote.handle_prompts(session, None, None, r"[\#\$]\s*$",
debug=True)
except Exception, e:
logging.error("Error occured: %s", e)
session.close()
else:
result = virsh.metadata(vm_name,
metadata_uri,
options=metadata_option,
key=metadata_key,
new_metadata=metadata_value,
**virsh_dargs)
check_result(result, status_error)
# Get metadata
for option in metadata_option.split():
if option == "--config":
vm.destroy()
vm.start()
check_result(get_metadata(metadata_option=option), status_error, metadata_value)
elif metadata_get:
check_result(get_metadata(metadata_option=option), status_error, metadata_value)
# Restart libvirtd:
if restart_libvirtd:
libvirtd = Libvirtd()
libvirtd.restart()
# Get metadata again
for option in metadata_option.split():
check_result(get_metadata(metadata_option=option), status_error, metadata_value)
# Remove metadata
if metadata_remove:
remove_option = metadata_option.replace("--edit", "")
remove_option += " --remove"
result = virsh.metadata(vm_name,
metadata_uri,
options=remove_option,
key=metadata_key,
**virsh_dargs)
check_result(result, status_error)
# Get metadata again
for option in metadata_option.split():
check_result(get_metadata(metadata_option=option), True)
finally:
vmxml.sync()
|
mikalstill/nova | refs/heads/master | nova/tests/unit/api/openstack/compute/test_cloudpipe.py | 3 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from webob import exc
from nova.api.openstack.compute import cloudpipe as cloudpipe_v21
from nova import test
from nova.tests.unit.api.openstack import fakes
project_id = uuidutils.generate_uuid(dashed=False)
class CloudpipeTestV21(test.NoDBTestCase):
cloudpipe = cloudpipe_v21
url = '/v2/fake/os-cloudpipe'
def setUp(self):
super(CloudpipeTestV21, self).setUp()
self.controller = self.cloudpipe.CloudpipeController()
self.req = fakes.HTTPRequest.blank('')
def test_cloudpipe_list(self):
self.assertRaises(exc.HTTPGone, self.controller.index, self.req)
def test_cloudpipe_create(self):
body = {'cloudpipe': {'project_id': project_id}}
self.assertRaises(exc.HTTPGone, self.controller.create,
self.req, body=body)
def test_cloudpipe_configure_project(self):
body = {"configure_project": {"vpn_ip": "1.2.3.4", "vpn_port": 222}}
self.assertRaises(exc.HTTPGone, self.controller.update,
self.req, 'configure-project', body=body)
|
ikargis/horizon_fod | refs/heads/master | openstack_dashboard/dashboards/project/databases/urls.py | 9 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns # noqa
from django.conf.urls import url # noqa
from openstack_dashboard.dashboards.project.databases import views
urlpatterns = patterns(
'',
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^launch$', views.LaunchInstanceView.as_view(), name='launch'),
url(r'^(?P<instance_id>[^/]+)/$', views.DetailView.as_view(),
name='detail'),
)
|
rockyzhang/zhangyanhit-python-for-android-mips | refs/heads/master | python-modules/twisted/twisted/conch/checkers.py | 59 | # -*- test-case-name: twisted.conch.test.test_checkers -*-
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Provide L{ICredentialsChecker} implementations to be used in Conch protocols.
"""
import os, base64, binascii, errno
try:
import pwd
except ImportError:
pwd = None
else:
import crypt
try:
# get this from http://www.twistedmatrix.com/users/z3p/files/pyshadow-0.2.tar.gz
import shadow
except:
shadow = None
try:
from twisted.cred import pamauth
except ImportError:
pamauth = None
from zope.interface import implements, providedBy
from twisted.conch import error
from twisted.conch.ssh import keys
from twisted.cred.checkers import ICredentialsChecker
from twisted.cred.credentials import IUsernamePassword, ISSHPrivateKey
from twisted.cred.error import UnauthorizedLogin, UnhandledCredentials
from twisted.internet import defer
from twisted.python import failure, reflect, log
from twisted.python.util import runAsEffectiveUser
from twisted.python.filepath import FilePath
def verifyCryptedPassword(crypted, pw):
if crypted[0] == '$': # md5_crypt encrypted
salt = '$1$' + crypted.split('$')[2]
else:
salt = crypted[:2]
return crypt.crypt(pw, salt) == crypted
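# Illustrative round trip (password and salt are made up): re-crypting the
# offered password with the salt recovered from the stored hash reproduces
# that hash, so verifyCryptedPassword(crypt.crypt('secret', 'ab'), 'secret')
# evaluates to True.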
class UNIXPasswordDatabase:
credentialInterfaces = IUsernamePassword,
implements(ICredentialsChecker)
def requestAvatarId(self, credentials):
if pwd:
try:
cryptedPass = pwd.getpwnam(credentials.username)[1]
except KeyError:
return defer.fail(UnauthorizedLogin("invalid username"))
else:
if cryptedPass not in ['*', 'x'] and \
verifyCryptedPassword(cryptedPass, credentials.password):
return defer.succeed(credentials.username)
if shadow:
gid = os.getegid()
uid = os.geteuid()
os.setegid(0)
os.seteuid(0)
try:
shadowPass = shadow.getspnam(credentials.username)[1]
except KeyError:
os.setegid(gid)
os.seteuid(uid)
return defer.fail(UnauthorizedLogin("invalid username"))
os.setegid(gid)
os.seteuid(uid)
if verifyCryptedPassword(shadowPass, credentials.password):
return defer.succeed(credentials.username)
return defer.fail(UnauthorizedLogin("invalid password"))
return defer.fail(UnauthorizedLogin("unable to verify password"))
class SSHPublicKeyDatabase:
"""
Checker that authenticates SSH public keys, based on public keys listed in
authorized_keys and authorized_keys2 files in user .ssh/ directories.
"""
credentialInterfaces = ISSHPrivateKey,
implements(ICredentialsChecker)
def requestAvatarId(self, credentials):
d = defer.maybeDeferred(self.checkKey, credentials)
d.addCallback(self._cbRequestAvatarId, credentials)
d.addErrback(self._ebRequestAvatarId)
return d
def _cbRequestAvatarId(self, validKey, credentials):
"""
Check whether the credentials themselves are valid, now that we know
if the key matches the user.
@param validKey: A boolean indicating whether or not the public key
matches a key in the user's authorized_keys file.
@param credentials: The credentials offered by the user.
@type credentials: L{ISSHPrivateKey} provider
@raise UnauthorizedLogin: (as a failure) if the key does not match the
user in C{credentials}. Also raised if the user provides an invalid
signature.
@raise ValidPublicKey: (as a failure) if the key matches the user but
the credentials do not include a signature. See
L{error.ValidPublicKey} for more information.
@return: The user's username, if authentication was successful.
"""
if not validKey:
return failure.Failure(UnauthorizedLogin("invalid key"))
if not credentials.signature:
return failure.Failure(error.ValidPublicKey())
else:
try:
pubKey = keys.Key.fromString(credentials.blob)
if pubKey.verify(credentials.signature, credentials.sigData):
return credentials.username
except: # any error should be treated as a failed login
log.err()
return failure.Failure(UnauthorizedLogin('error while verifying key'))
return failure.Failure(UnauthorizedLogin("unable to verify key"))
def getAuthorizedKeysFiles(self, credentials):
"""
Return a list of L{FilePath} instances for I{authorized_keys} files
which might contain information about authorized keys for the given
credentials.
On OpenSSH servers, the default location of the file containing the
list of authorized public keys is
U{$HOME/.ssh/authorized_keys<http://www.openbsd.org/cgi-bin/man.cgi?query=sshd_config>}.
I{$HOME/.ssh/authorized_keys2} is also returned, though it has been
U{deprecated by OpenSSH since
2001<http://marc.info/?m=100508718416162>}.
@return: A list of L{FilePath} instances to files with the authorized keys.
"""
pwent = pwd.getpwnam(credentials.username)
root = FilePath(pwent.pw_dir).child('.ssh')
files = ['authorized_keys', 'authorized_keys2']
return [root.child(f) for f in files]
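    # Subclasses may override getAuthorizedKeysFiles to consult other
    # locations; a minimal sketch (the path is purely illustrative):
    #
    #   class SingleFileKeyDatabase(SSHPublicKeyDatabase):
    #       def getAuthorizedKeysFiles(self, credentials):
    #           return [FilePath('/etc/ssh/authorized_keys')]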
def checkKey(self, credentials):
"""
Retrieve files containing authorized keys and check against user
credentials.
"""
uid, gid = os.geteuid(), os.getegid()
ouid, ogid = pwd.getpwnam(credentials.username)[2:4]
for filepath in self.getAuthorizedKeysFiles(credentials):
if not filepath.exists():
continue
try:
lines = filepath.open()
except IOError, e:
if e.errno == errno.EACCES:
lines = runAsEffectiveUser(ouid, ogid, filepath.open)
else:
raise
for l in lines:
l2 = l.split()
if len(l2) < 2:
continue
try:
if base64.decodestring(l2[1]) == credentials.blob:
return True
except binascii.Error:
continue
return False
def _ebRequestAvatarId(self, f):
if not f.check(UnauthorizedLogin):
log.msg(f)
return failure.Failure(UnauthorizedLogin("unable to get avatar id"))
return f
class SSHProtocolChecker:
"""
SSHProtocolChecker is a checker that requires multiple authentications
to succeed. To add a checker, call my registerChecker method with
the checker and the interface.
After each successful authenticate, I call my areDone method with the
avatar id. To get a list of the successful credentials for an avatar id,
    use C{SSHProtocolChecker.successfulCredentials[avatarId]}. If L{areDone}
returns True, the authentication has succeeded.
"""
implements(ICredentialsChecker)
def __init__(self):
self.checkers = {}
self.successfulCredentials = {}
def get_credentialInterfaces(self):
return self.checkers.keys()
credentialInterfaces = property(get_credentialInterfaces)
def registerChecker(self, checker, *credentialInterfaces):
if not credentialInterfaces:
credentialInterfaces = checker.credentialInterfaces
for credentialInterface in credentialInterfaces:
self.checkers[credentialInterface] = checker
def requestAvatarId(self, credentials):
"""
Part of the L{ICredentialsChecker} interface. Called by a portal with
some credentials to check if they'll authenticate a user. We check the
interfaces that the credentials provide against our list of acceptable
checkers. If one of them matches, we ask that checker to verify the
credentials. If they're valid, we call our L{_cbGoodAuthentication}
method to continue.
@param credentials: the credentials the L{Portal} wants us to verify
"""
ifac = providedBy(credentials)
for i in ifac:
c = self.checkers.get(i)
if c is not None:
d = defer.maybeDeferred(c.requestAvatarId, credentials)
return d.addCallback(self._cbGoodAuthentication,
credentials)
return defer.fail(UnhandledCredentials("No checker for %s" % \
', '.join(map(reflect.qual, ifac))))
def _cbGoodAuthentication(self, avatarId, credentials):
"""
Called if a checker has verified the credentials. We call our
L{areDone} method to see if the whole of the successful authentications
are enough. If they are, we return the avatar ID returned by the first
checker.
"""
if avatarId not in self.successfulCredentials:
self.successfulCredentials[avatarId] = []
self.successfulCredentials[avatarId].append(credentials)
if self.areDone(avatarId):
del self.successfulCredentials[avatarId]
return avatarId
else:
raise error.NotEnoughAuthentication()
def areDone(self, avatarId):
"""
Override to determine if the authentication is finished for a given
avatarId.
@param avatarId: the avatar returned by the first checker. For
this checker to function correctly, all the checkers must
return the same avatar ID.
"""
return True
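# Illustrative composition (a sketch, not part of the original module):
# require both a password and a public key before authentication completes:
#
#   checker = SSHProtocolChecker()
#   checker.registerChecker(UNIXPasswordDatabase(), IUsernamePassword)
#   checker.registerChecker(SSHPublicKeyDatabase(), ISSHPrivateKey)
#   checker.areDone = lambda avatarId: len(
#       checker.successfulCredentials[avatarId]) >= 2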
|
dpryan79/tools-iuc | refs/heads/master | data_managers/data_manager_bwameth_index_builder/data_manager/bwameth_index_builder.py | 8 | #!/usr/bin/env python
# Based heavily on the HISAT2 data manager wrapper
import argparse
import os
import subprocess
import sys
from json import dumps, loads
DEFAULT_DATA_TABLE_NAME = "bwameth_indexes"
def build_bwameth_index(data_manager_dict, params, args):
data_table_name = args.data_table_name
target_directory = params[ 'output_data' ][0]['extra_files_path']
if not os.path.exists( target_directory ):
os.mkdir( target_directory )
fasta_base_name = os.path.basename(args.fasta_filename)
sym_linked_fasta_filename = os.path.join(target_directory, fasta_base_name)
os.symlink(os.path.abspath(args.fasta_filename), sym_linked_fasta_filename)
cmd = ['bwameth.py', 'index', sym_linked_fasta_filename]
proc = subprocess.Popen(args=cmd, shell=False, cwd=target_directory)
return_code = proc.wait()
if return_code:
print >> sys.stderr, "Error building index."
sys.exit( return_code )
data_table_entry = dict(value=args.dbkey, dbkey=args.dbkey, name=args.name, path=sym_linked_fasta_filename)
_add_data_table_entry(data_manager_dict, data_table_name, data_table_entry)
def _add_data_table_entry( data_manager_dict, data_table_name, data_table_entry ):
data_manager_dict['data_tables'] = data_manager_dict.get( 'data_tables', {} )
data_manager_dict['data_tables'][ data_table_name ] = data_manager_dict['data_tables'].get( data_table_name, [] )
data_manager_dict['data_tables'][ data_table_name ].append( data_table_entry )
return data_manager_dict
def main():
# Parse Command Line
parser = argparse.ArgumentParser()
parser.add_argument( '--output', default=None )
parser.add_argument( '--fasta_filename', default=None )
parser.add_argument( '--dbkey', default=None )
parser.add_argument( '--name', default=None )
parser.add_argument( '--description', default=None )
parser.add_argument( '--data_table_name', default=DEFAULT_DATA_TABLE_NAME )
args = parser.parse_args()
filename = args.output
params = loads(open(filename).read())
data_manager_dict = {}
if args.dbkey in [ None, '', '?' ]:
raise Exception('"%s" is not a valid dbkey. You must specify a valid dbkey.' % (args.dbkey))
# build the index
build_bwameth_index(data_manager_dict, params, args)
# save info to json file
open(filename, 'wb').write(dumps(data_manager_dict))
if __name__ == "__main__":
main()
|
ChanduERP/odoo | refs/heads/8.0 | openerp/osv/__init__.py | 337 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import osv
import fields
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
enen92/script.matchcenter | refs/heads/master | resources/lib/__init__.py | 6 | # -*- coding: utf-8 -*-
'''
script.matchcenter - Football information for Kodi
A program addon that can be mapped to a key on your remote to display football information.
Livescores, Event details, Line-ups, League tables, next and previous matches by team. Follow what
others are saying about the match in twitter.
Copyright (C) 2016 enen92
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
''' |
faridani/pyDoc | refs/heads/master | Unidecode/unidecode/x00c.py | 252 | data = (
'[?]', # 0x00
'N', # 0x01
'N', # 0x02
'H', # 0x03
'[?]', # 0x04
'a', # 0x05
'aa', # 0x06
'i', # 0x07
'ii', # 0x08
'u', # 0x09
'uu', # 0x0a
'R', # 0x0b
'L', # 0x0c
'[?]', # 0x0d
'e', # 0x0e
'ee', # 0x0f
'ai', # 0x10
'[?]', # 0x11
'o', # 0x12
'oo', # 0x13
'au', # 0x14
'k', # 0x15
'kh', # 0x16
'g', # 0x17
'gh', # 0x18
'ng', # 0x19
'c', # 0x1a
'ch', # 0x1b
'j', # 0x1c
'jh', # 0x1d
'ny', # 0x1e
'tt', # 0x1f
'tth', # 0x20
'dd', # 0x21
'ddh', # 0x22
'nn', # 0x23
't', # 0x24
'th', # 0x25
'd', # 0x26
'dh', # 0x27
'n', # 0x28
'[?]', # 0x29
'p', # 0x2a
'ph', # 0x2b
'b', # 0x2c
'bh', # 0x2d
'm', # 0x2e
'y', # 0x2f
'r', # 0x30
'rr', # 0x31
'l', # 0x32
'll', # 0x33
'[?]', # 0x34
'v', # 0x35
'sh', # 0x36
'ss', # 0x37
's', # 0x38
'h', # 0x39
'[?]', # 0x3a
'[?]', # 0x3b
'[?]', # 0x3c
'[?]', # 0x3d
'aa', # 0x3e
'i', # 0x3f
'ii', # 0x40
'u', # 0x41
'uu', # 0x42
'R', # 0x43
'RR', # 0x44
'[?]', # 0x45
'e', # 0x46
'ee', # 0x47
'ai', # 0x48
'[?]', # 0x49
'o', # 0x4a
'oo', # 0x4b
'au', # 0x4c
'', # 0x4d
'[?]', # 0x4e
'[?]', # 0x4f
'[?]', # 0x50
'[?]', # 0x51
'[?]', # 0x52
'[?]', # 0x53
'[?]', # 0x54
'+', # 0x55
'+', # 0x56
'[?]', # 0x57
'[?]', # 0x58
'[?]', # 0x59
'[?]', # 0x5a
'[?]', # 0x5b
'[?]', # 0x5c
'[?]', # 0x5d
'[?]', # 0x5e
'[?]', # 0x5f
'RR', # 0x60
'LL', # 0x61
'[?]', # 0x62
'[?]', # 0x63
'[?]', # 0x64
'[?]', # 0x65
'0', # 0x66
'1', # 0x67
'2', # 0x68
'3', # 0x69
'4', # 0x6a
'5', # 0x6b
'6', # 0x6c
'7', # 0x6d
'8', # 0x6e
'9', # 0x6f
'[?]', # 0x70
'[?]', # 0x71
'[?]', # 0x72
'[?]', # 0x73
'[?]', # 0x74
'[?]', # 0x75
'[?]', # 0x76
'[?]', # 0x77
'[?]', # 0x78
'[?]', # 0x79
'[?]', # 0x7a
'[?]', # 0x7b
'[?]', # 0x7c
'[?]', # 0x7d
'[?]', # 0x7e
'[?]', # 0x7f
'[?]', # 0x80
'[?]', # 0x81
'N', # 0x82
'H', # 0x83
'[?]', # 0x84
'a', # 0x85
'aa', # 0x86
'i', # 0x87
'ii', # 0x88
'u', # 0x89
'uu', # 0x8a
'R', # 0x8b
'L', # 0x8c
'[?]', # 0x8d
'e', # 0x8e
'ee', # 0x8f
'ai', # 0x90
'[?]', # 0x91
'o', # 0x92
'oo', # 0x93
'au', # 0x94
'k', # 0x95
'kh', # 0x96
'g', # 0x97
'gh', # 0x98
'ng', # 0x99
'c', # 0x9a
'ch', # 0x9b
'j', # 0x9c
'jh', # 0x9d
'ny', # 0x9e
'tt', # 0x9f
'tth', # 0xa0
'dd', # 0xa1
'ddh', # 0xa2
'nn', # 0xa3
't', # 0xa4
'th', # 0xa5
'd', # 0xa6
'dh', # 0xa7
'n', # 0xa8
'[?]', # 0xa9
'p', # 0xaa
'ph', # 0xab
'b', # 0xac
'bh', # 0xad
'm', # 0xae
'y', # 0xaf
'r', # 0xb0
'rr', # 0xb1
'l', # 0xb2
'll', # 0xb3
'[?]', # 0xb4
'v', # 0xb5
'sh', # 0xb6
'ss', # 0xb7
's', # 0xb8
'h', # 0xb9
'[?]', # 0xba
'[?]', # 0xbb
'[?]', # 0xbc
'[?]', # 0xbd
'aa', # 0xbe
'i', # 0xbf
'ii', # 0xc0
'u', # 0xc1
'uu', # 0xc2
'R', # 0xc3
'RR', # 0xc4
'[?]', # 0xc5
'e', # 0xc6
'ee', # 0xc7
'ai', # 0xc8
'[?]', # 0xc9
'o', # 0xca
'oo', # 0xcb
'au', # 0xcc
'', # 0xcd
'[?]', # 0xce
'[?]', # 0xcf
'[?]', # 0xd0
'[?]', # 0xd1
'[?]', # 0xd2
'[?]', # 0xd3
'[?]', # 0xd4
'+', # 0xd5
'+', # 0xd6
'[?]', # 0xd7
'[?]', # 0xd8
'[?]', # 0xd9
'[?]', # 0xda
'[?]', # 0xdb
'[?]', # 0xdc
'[?]', # 0xdd
'lll', # 0xde
'[?]', # 0xdf
'RR', # 0xe0
'LL', # 0xe1
'[?]', # 0xe2
'[?]', # 0xe3
'[?]', # 0xe4
'[?]', # 0xe5
'0', # 0xe6
'1', # 0xe7
'2', # 0xe8
'3', # 0xe9
'4', # 0xea
'5', # 0xeb
'6', # 0xec
'7', # 0xed
'8', # 0xee
'9', # 0xef
'[?]', # 0xf0
'[?]', # 0xf1
'[?]', # 0xf2
'[?]', # 0xf3
'[?]', # 0xf4
'[?]', # 0xf5
'[?]', # 0xf6
'[?]', # 0xf7
'[?]', # 0xf8
'[?]', # 0xf9
'[?]', # 0xfa
'[?]', # 0xfb
'[?]', # 0xfc
'[?]', # 0xfd
'[?]', # 0xfe
)
|
denyszamiatin/to_do | refs/heads/master | tests/test_tasks.py | 1 | from unittest import TestCase
from main import add_task, tasks
class TestTasks(TestCase):
def test_add_tasks(self):
add_task('01.01.2015', 'Make coffee')
self.assertEqual(tasks, [['01.01.2015', 'Make coffee']])
|
Tejal011089/trufil-erpnext | refs/heads/master | erpnext/accounts/doctype/payment_tool/payment_tool.py | 30 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt
from frappe.model.document import Document
import json
class PaymentTool(Document):
def make_journal_entry(self):
from erpnext.accounts.utils import get_balance_on
total_payment_amount = 0.00
invoice_voucher_type = {
'Sales Invoice': 'against_invoice',
'Purchase Invoice': 'against_voucher',
'Journal Entry': 'against_jv',
'Sales Order': 'against_sales_order',
'Purchase Order': 'against_purchase_order',
}
jv = frappe.new_doc('Journal Entry')
jv.voucher_type = 'Journal Entry'
jv.company = self.company
jv.cheque_no = self.reference_no
jv.cheque_date = self.reference_date
if not self.total_payment_amount:
frappe.throw(_("Please enter Payment Amount in atleast one row"))
for v in self.get("vouchers"):
if not frappe.db.get_value(v.against_voucher_type, {"name": v.against_voucher_no}):
frappe.throw(_("Row {0}: {1} is not a valid {2}").format(v.idx, v.against_voucher_no,
v.against_voucher_type))
if v.payment_amount:
d1 = jv.append("accounts")
d1.account = self.party_account
d1.party_type = self.party_type
d1.party = self.party
d1.balance = get_balance_on(self.party_account)
d1.set("debit" if self.received_or_paid=="Paid" else "credit", flt(v.payment_amount))
d1.set(invoice_voucher_type.get(v.against_voucher_type), v.against_voucher_no)
d1.set('is_advance', 'Yes' if v.against_voucher_type in ['Sales Order', 'Purchase Order'] else 'No')
total_payment_amount = flt(total_payment_amount) + flt(d1.debit) - flt(d1.credit)
d2 = jv.append("accounts")
d2.account = self.payment_account
d2.set('debit' if total_payment_amount < 0 else 'credit', abs(total_payment_amount))
if self.payment_account:
d2.balance = get_balance_on(self.payment_account)
return jv.as_dict()
@frappe.whitelist()
def get_outstanding_vouchers(args):
from erpnext.accounts.utils import get_outstanding_invoices
if not frappe.has_permission("Payment Tool"):
frappe.throw(_("No permission to use Payment Tool"), frappe.PermissionError)
args = json.loads(args)
if args.get("party_type") == "Customer" and args.get("received_or_paid") == "Received":
amount_query = "ifnull(debit, 0) - ifnull(credit, 0)"
elif args.get("party_type") == "Supplier" and args.get("received_or_paid") == "Paid":
amount_query = "ifnull(credit, 0) - ifnull(debit, 0)"
else:
frappe.throw(_("Please enter the Against Vouchers manually"))
# Get all outstanding sales /purchase invoices
outstanding_invoices = get_outstanding_invoices(amount_query, args.get("party_account"),
args.get("party_type"), args.get("party"))
# Get all SO / PO which are not fully billed or aginst which full advance not paid
orders_to_be_billed = get_orders_to_be_billed(args.get("party_type"), args.get("party"))
return outstanding_invoices + orders_to_be_billed
def get_orders_to_be_billed(party_type, party):
voucher_type = 'Sales Order' if party_type == "Customer" else 'Purchase Order'
orders = frappe.db.sql("""
select
name as voucher_no,
ifnull(base_grand_total, 0) as invoice_amount,
(ifnull(base_grand_total, 0) - ifnull(advance_paid, 0)) as outstanding_amount,
transaction_date as posting_date
from
`tab%s`
where
%s = %s
and docstatus = 1
and ifnull(status, "") != "Stopped"
and ifnull(base_grand_total, 0) > ifnull(advance_paid, 0)
and abs(100 - ifnull(per_billed, 0)) > 0.01
""" % (voucher_type, 'customer' if party_type == "Customer" else 'supplier', '%s'),
party, as_dict = True)
order_list = []
for d in orders:
d["voucher_type"] = voucher_type
order_list.append(d)
return order_list
@frappe.whitelist()
def get_against_voucher_amount(against_voucher_type, against_voucher_no):
if against_voucher_type in ["Sales Order", "Purchase Order"]:
select_cond = "base_grand_total as total_amount, ifnull(base_grand_total, 0) - ifnull(advance_paid, 0) as outstanding_amount"
elif against_voucher_type in ["Sales Invoice", "Purchase Invoice"]:
select_cond = "base_grand_total as total_amount, outstanding_amount"
elif against_voucher_type == "Journal Entry":
select_cond = "total_debit as total_amount"
details = frappe.db.sql("""select {0} from `tab{1}` where name = %s"""
.format(select_cond, against_voucher_type), against_voucher_no, as_dict=1)
return details[0] if details else {}
|
chrismbarnes/ndnSIM | refs/heads/ndnSIM-v1 | src/network/test/examples-to-run.py | 129 | #! /usr/bin/env python
## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
# A list of C++ examples to run in order to ensure that they remain
# buildable and runnable over time. Each tuple in the list contains
#
# (example_name, do_run, do_valgrind_run).
#
# See test.py for more information.
cpp_examples = [
("main-packet-header", "True", "True"),
("main-packet-tag", "True", "True"),
("red-tests", "True", "True"),
]
# A list of Python examples to run in order to ensure that they remain
# runnable over time. Each tuple in the list contains
#
# (example_name, do_run).
#
# See test.py for more information.
python_examples = []
|
perkinslr/pypyjs | refs/heads/master | addedLibraries/twisted/conch/mixin.py | 69 | # -*- test-case-name: twisted.conch.test.test_mixin -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Experimental optimization
This module provides a single mixin class which allows protocols to
collapse numerous small writes into a single larger one.
@author: Jp Calderone
"""
from twisted.internet import reactor
class BufferingMixin:
"""Mixin which adds write buffering.
"""
_delayedWriteCall = None
bytes = None
DELAY = 0.0
def schedule(self):
return reactor.callLater(self.DELAY, self.flush)
def reschedule(self, token):
token.reset(self.DELAY)
def write(self, bytes):
"""Buffer some bytes to be written soon.
Every call to this function delays the real write by C{self.DELAY}
seconds. When the delay expires, all collected bytes are written
to the underlying transport using L{ITransport.writeSequence}.
"""
if self._delayedWriteCall is None:
self.bytes = []
self._delayedWriteCall = self.schedule()
else:
self.reschedule(self._delayedWriteCall)
self.bytes.append(bytes)
def flush(self):
"""Flush the buffer immediately.
"""
self._delayedWriteCall = None
self.transport.writeSequence(self.bytes)
self.bytes = None
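# A minimal usage sketch (assumes a normal transport-backed protocol): mixing
# this class into a Protocol coalesces bursts of write() calls into a single
# writeSequence() on the transport:
#
#   from twisted.internet.protocol import Protocol
#
#   class BufferedEcho(BufferingMixin, Protocol):
#       def dataReceived(self, data):
#           self.write(data)  # flushed in one batch after DELAY seconds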
|
zhounanshu/Flask-AppBuilder | refs/heads/master | examples/issue_188/app/__init__.py | 9 | import logging
from flask import Flask
from flask.ext.appbuilder import SQLA, AppBuilder
"""
Logging configuration
"""
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
logging.getLogger().setLevel(logging.DEBUG)
app = Flask(__name__)
app.config.from_object('config')
db = SQLA(app)
appbuilder = AppBuilder(app, db.session)
"""
from sqlalchemy.engine import Engine
from sqlalchemy import event
#Only include this for SQLLite constraints
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
# Will force sqllite contraint foreign keys
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
"""
from app import views
|
JanNash/sms-tools | refs/heads/master | lectures/05-Sinusoidal-model/plots-code/spectral-peaks-interpolation.py | 22 | import numpy as np
import matplotlib.pyplot as plt
from scipy.signal import hamming, triang, blackmanharris
from scipy.fftpack import fft, ifft
import math
import sys, os, functools, time
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../software/models/'))
import dftModel as DFT
import utilFunctions as UF
(fs, x) = UF.wavread('../../../sounds/oboe-A4.wav')
N = 512*2
M = 511
t = -60
w = np.hamming(M)
start = int(.8*fs)
hN = N/2
hM = (M+1)/2
x1 = x[start:start+M]
mX, pX = DFT.dftAnal(x1, w, N)
ploc = UF.peakDetection(mX, t)
iploc, ipmag, ipphase = UF.peakInterp(mX, pX, ploc)
pmag = mX[ploc]
freqaxis = fs*np.arange(mX.size)/float(N)
plt.figure(1, figsize=(9.5, 5.5))
plt.subplot (2,1,1)
plt.plot(freqaxis,mX,'r', lw=1.5)
plt.axis([300,2500,-70,max(mX)])
plt.plot(fs * iploc / N, ipmag, marker='x', color='b', linestyle='', markeredgewidth=1.5)
plt.title('mX + spectral peaks (oboe-A4.wav)')
plt.subplot (2,1,2)
plt.plot(freqaxis,pX,'c', lw=1.5)
plt.axis([300,2500,min(pX),-1])
plt.plot(fs * iploc / N, ipphase, marker='x', color='b', linestyle='', markeredgewidth=1.5)
plt.title('pX + spectral peaks')
plt.tight_layout()
plt.savefig('spectral-peaks-interpolation.png')
plt.show()
|
gangadhar-kadam/church-erpnext | refs/heads/master | patches/april_2013/p03_fixes_for_lead_in_quotation.py | 6 | import webnotes
def execute():
webnotes.conn.sql("""update `tabQuotation` set customer_name = organization
where quotation_to = 'Lead' and ifnull(lead, '') != ''
and ifnull(organization, '') != ''""")
webnotes.conn.sql("""update `tabQuotation` set customer_name = lead_name
where quotation_to = 'Lead' and ifnull(lead, '') != ''
and ifnull(organization, '') = '' and ifnull(lead_name, '') != ''""")
webnotes.conn.sql("""update `tabQuotation` set contact_display = lead_name
where quotation_to = 'Lead' and ifnull(lead, '') != '' and ifnull(lead_name, '') != ''""")
webnotes.conn.sql("""update `tabOpportunity` set contact_display = lead_name
where enquiry_from = 'Lead' and ifnull(lead, '') != '' and ifnull(lead_name, '') != ''""")
webnotes.conn.sql("""update `tabOpportunity` opp, `tabLead` lead
set opp.customer_name = lead.company_name where opp.lead = lead.name""") |
nextgis/quickmapservices_server | refs/heads/master | qms_server/qms_site/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
aikonbrasil/ns3-20 | refs/heads/master | doc/tutorial-pt-br/source/conf.py | 90 | # -*- coding: utf-8 -*-
#
# ns-3 documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 14 09:00:39 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.pngmath']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
source_encoding = 'latin1'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ns-3'
copyright = u'2008-11, ns-3 project'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'ns-3-dev'
# The full version, including alpha/beta/rc tags.
release = 'ns-3-dev'
# The language for content autogenerated by Sphinx. Refer to babel documentation
# for a list of supported languages.
language = 'pt_BR'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'ns3_html_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['../..']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'Rastreamento'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y %H:%M'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'ns-3doc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'ns-3-tutorial.tex', u'ns-3 Rastreamento',
u'ns-3 project', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = '../../ns3_html_theme/static/ns-3.png'
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ns-3-tutorial', u'ns-3 Tutorial',
[u'ns-3 project'], 1)
]
|
CCI-MOC/moc-openstack-tools | refs/heads/master | setpass.py | 1 | # Copyright 2016 Massachusetts Open Cloud
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import string
import random
def random_password(size):
"""Generate a random password of length 'size'
The resulting password may contain any of:
upper or lowercase letters A-Z
the digits 0-9
valid punctuation marks defined in the 'punctuation' variable below
"""
punctuation = '#$%&!'
chars = string.ascii_letters + string.digits + punctuation
return ''.join(random.choice(chars) for _ in range(size))
class SetpassClient(object):
"""Class for interacting with a Setpass server"""
def __init__(self, session, setpass_url):
self.url = setpass_url
self.session = session
def get_token(self, userid, password, pin):
"""Add the user ID and random password to the setpass database.
Returns a token allowing the user to set their password.
"""
body = {'password': password, 'pin': pin}
request_url = '{base}/token/{userid}'.format(base=self.url,
userid=userid)
response = self.session.put(request_url, json=body)
token = response.text
return token
def get_url(self, token):
""" Generate URL for the user to set their password """
url = "{base}?token={token}".format(base=self.url, token=token)
return url
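# Example usage (illustrative sketch; the session object and server URL are
# hypothetical and must be supplied by the caller):
#
#   session = get_keystone_session()   # an authenticated requests-style session
#   client = SetpassClient(session, 'https://setpass.example.org')
#   password = random_password(16)
#   token = client.get_token(user_id, password, pin)
#   print(client.get_url(token))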
|
kerneltask/micropython | refs/heads/master | tests/float/complex_special_mehods.py | 2 | # test complex interacting with special methods
class A:
def __add__(self, x):
print("__add__")
return 1
def __radd__(self, x):
print("__radd__")
return 2
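# A() + 1j resolves directly via A.__add__; for 1j + A(), complex.__add__
# returns NotImplemented for A, so Python falls back to A.__radd__.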
print(A() + 1j)
print(1j + A())
|
mcanthony/libvips | refs/heads/master | python/Vips.py | 1 | # -*- Mode: Python; py-indent-offset: 4 -*-
# vim: tabstop=4 shiftwidth=4 expandtab
from __future__ import division
# overrides for pygobject gobject-introspection binding for libvips, tested
# with python2.7 and python3.4
# copy this file to dist-packages/gi/overrides, eg.
#
# sudo cp Vips.py /usr/lib/python2.7/dist-packages/gi/overrides
# sudo cp Vips.py /usr/lib/python3/dist-packages/gi/overrides
#
# Alternatively, build vips to another prefix, then copy Vips.py and Vips.pyc
# from $prefix/lib/python2.7/dist-packages/gi/overrides to /usr
# This file is part of VIPS.
#
# VIPS is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
# more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# These files are distributed with VIPS - http://www.vips.ecs.soton.ac.uk
import sys
import re
import logging
logger = logging.getLogger(__name__)
from gi.repository import GObject
from ..overrides import override
from ..module import get_introspection_module
Vips = get_introspection_module('Vips')
__all__ = []
# start up vips!
# passing argv[0] helps vips find its data files on some platforms
Vips.init(sys.argv[0])
# need the gtypes for various vips types
vips_type_array_int = GObject.GType.from_name("VipsArrayInt")
vips_type_array_double = GObject.GType.from_name("VipsArrayDouble")
vips_type_array_image = GObject.GType.from_name("VipsArrayImage")
vips_type_blob = GObject.GType.from_name("VipsBlob")
vips_type_image = GObject.GType.from_name("VipsImage")
vips_type_operation = GObject.GType.from_name("VipsOperation")
vips_type_ref_string = GObject.GType.from_name("VipsRefString")
def is_2D(value):
if not isinstance(value, list):
return False
for x in value:
if not isinstance(x, list):
return False
if len(x) != len(value[0]):
return False
return True
def imageize(match_image, value):
logger.debug('imageize match_image=%s, value=%s' % (match_image, value))
# 2D arrays become array images
if is_2D(value):
return Vips.Image.new_from_array(value)
# if there's nothing to match to, also make an array
if match_image is None:
return Vips.Image.new_from_array(value)
# assume this is a pixel constant ... expand into an image using
# match as a template
pixel = (Vips.Image.black(1, 1) + value).cast(match_image.format)
image = pixel.embed(0, 0, match_image.width, match_image.height,
extend = Vips.Extend.COPY)
image = image.copy(interpretation = match_image.interpretation,
xres = match_image.xres,
yres = match_image.yres)
return image
# we'd like to use memoryview to avoid copying things like ICC profiles, but
# unfortunately pygobject does not support this ... so for blobs we just use
# bytes().
unpack_types = [[Vips.Blob, lambda x: bytes(x.get())],
[Vips.RefString, lambda x: x.get()],
[Vips.ArrayDouble, lambda x: x.get()],
[Vips.ArrayImage, lambda x: x.get()],
[Vips.ArrayInt, lambda x: x.get()]]
def unpack(value):
for t, cast in unpack_types:
if isinstance(value, t):
return cast(value)
return value
def array_image_new(array):
match_image = None
for i in range(0, len(array)):
if isinstance(array[i], Vips.Image):
match_image = array[i]
break
if match_image is None:
raise Error('Unable to make image array argument.',
'Array must contain at least one image.')
for i in range(0, len(array)):
if not isinstance(array[i], Vips.Image):
array[i] = imageize(match_image, array[i])
return Vips.ArrayImage.new(array)
arrayize_types = [[vips_type_array_int, Vips.ArrayInt.new],
[vips_type_array_double, Vips.ArrayDouble.new],
[vips_type_array_image, array_image_new]]
def arrayize(gtype, value):
for t, cast in arrayize_types:
if GObject.type_is_a(gtype, t):
if not isinstance(value, list):
value = [value]
return cast(value)
return value
class Error(Exception):
"""An error from vips.
message -- a high-level description of the error
detail -- a string with some detailed diagnostics
"""
def __init__(self, message, detail = None):
self.message = message
        if detail is None:
detail = Vips.error_buffer()
Vips.error_clear()
self.detail = detail
logger.debug('Error %s %s', self.message, self.detail)
def __str__(self):
return '%s\n %s' % (self.message, self.detail)
Vips.Error = Error
class Argument(object):
def __init__(self, op, prop):
self.op = op
self.prop = prop
self.name = re.sub("-", "_", prop.name)
self.flags = op.get_argument_flags(self.name)
self.priority = op.get_argument_priority(self.name)
self.isset = op.argument_isset(self.name)
def set_value(self, match_image, value):
logger.debug('assigning %s to %s' % (value, self.name))
logger.debug('%s needs a %s' % (self.name, self.prop.value_type))
# blob-ize
if GObject.type_is_a(self.prop.value_type, vips_type_blob):
if not isinstance(value, Vips.Blob):
value = Vips.Blob.new(None, value)
# image-ize
if GObject.type_is_a(self.prop.value_type, vips_type_image):
if not isinstance(value, Vips.Image):
value = imageize(match_image, value)
# array-ize some types, if necessary
value = arrayize(self.prop.value_type, value)
# MODIFY input images need to be copied before assigning them
if self.flags & Vips.ArgumentFlags.MODIFY:
# don't use .copy(): we want to make a new pipeline with no
# reference back to the old stuff ... this way we can free the
# previous image earlier
logger.debug('MODIFY argument: copying image')
new_image = Vips.Image.new_memory()
value.write(new_image)
value = new_image
logger.debug('assigning %s' % value)
self.op.props.__setattr__(self.name, value)
def get_value(self):
value = self.op.props.__getattribute__(self.name)
logger.debug('read out %s from %s' % (value, self.name))
return unpack(value)
def description(self):
result = self.name
result += " " * (10 - len(self.name)) + " -- " + self.prop.blurb
result += ", " + self.prop.value_type.name
return result
Vips.Argument = Argument
class Operation(Vips.Operation):
# find all the args for this op, sort into priority order
# remember to ignore deprecated ones
def get_args(self):
args = [Argument(self, x) for x in self.props]
args = [y for y in args
if not y.flags & Vips.ArgumentFlags.DEPRECATED]
args.sort(key = lambda x: x.priority)
return args
Operation = override(Operation)
__all__.append('Operation')
# search a list recursively for a Vips.Image object
def find_image(x):
if isinstance(x, Vips.Image):
return x
if isinstance(x, list):
for i in x:
y = find_image(i)
if y is not None:
return y
return None
def _call_base(name, required, optional, self = None, option_string = None):
logger.debug('_call_base name=%s, required=%s optional=%s' %
(name, required, optional))
if self:
logger.debug('_call_base self=%s' % self)
if option_string:
logger.debug('_call_base option_string = %s' % option_string)
try:
op = Vips.Operation.new(name)
except TypeError as e:
raise Error('No such operator.')
if op.get_flags() & Vips.OperationFlags.DEPRECATED:
raise Error('No such operator.', 'operator "%s" is deprecated' % name)
# set str options first so the user can't override things we set
# deliberately and break stuff
if option_string:
if op.set_from_string(option_string) != 0:
raise Error('Bad arguments.')
args = op.get_args()
enm = Vips.ArgumentFlags
# find all required, unassigned input args
required_input = [x for x in args if x.flags & enm.INPUT and
x.flags & enm.REQUIRED and
not x.isset]
# do we have a non-None self pointer? this is used to set the first
# compatible input arg
if self is not None:
found = False
for x in required_input:
if GObject.type_is_a(self, x.prop.value_type):
x.set_value(None, self)
required_input.remove(x)
found = True
break
if not found:
raise Error('Bad arguments.', 'No %s argument to %s.' %
(str(self.__class__), name))
if len(required_input) != len(required):
raise Error('Wrong number of arguments.',
'%s needs %d arguments, you supplied %d.' %
(name, len(required_input), len(required)))
# if we need an image arg but the user supplied a number or list of
# numbers, we expand it into an image automatically ... the number is
# expanded to match self, or if that's None, the first image we can find in
# the required or optional arguments
match_image = self
if match_image is None:
for arg in required:
match_image = find_image(arg)
if match_image is not None:
break
if match_image is None:
for arg_name in optional:
match_image = find_image(optional[arg_name])
if match_image is not None:
break
for i in range(len(required_input)):
required_input[i].set_value(match_image, required[i])
# find all optional, unassigned input args ... make a hash from name to
# Argument
optional_input = {x.name: x for x in args if x.flags & enm.INPUT and
not x.flags & enm.REQUIRED and
not x.isset}
# find all optional output args ... we use "x = True"
# in args to mean add that to output
optional_output = {x.name: x for x in args if x.flags & enm.OUTPUT and
not x.flags & enm.REQUIRED}
# set optional input args
for key in list(optional.keys()):
if key in optional_input:
optional_input[key].set_value(match_image, optional[key])
elif key in optional_output:
# must be a literal True value
if optional[key] is not True:
raise Error('Optional output argument must be True.',
'Argument %s should equal True.' % key)
else:
raise Error('Unknown argument.',
'Operator %s has no argument %s.' % (name, key))
# call
logger.debug('_call_base checking cache for op %s' % op)
op2 = Vips.cache_operation_build(op)
logger.debug('_call_base got op2 %s' % op2)
    if op2 is None:
raise Error('Error calling operator %s.' % name)
# rescan args if op2 is different from op
if op2 != op:
logger.debug('_call_base rescanning args')
args = op2.get_args()
optional_output = {x.name: x for x in args if x.flags & enm.OUTPUT and
not x.flags & enm.REQUIRED}
# gather output args
logger.debug('_call_base fetching required output args')
out = []
for x in args:
# required output arg
if x.flags & enm.OUTPUT and x.flags & enm.REQUIRED:
out.append(x.get_value())
# modified input arg ... this will get the memory image we made above
if x.flags & enm.INPUT and x.flags & enm.MODIFY:
out.append(x.get_value())
logger.debug('_call_base fetching optional output args')
out_dict = {}
for x in list(optional.keys()):
if x in optional_output:
out_dict[x] = optional_output[x].get_value()
if out_dict != {}:
out.append(out_dict)
if len(out) == 1:
out = out[0]
elif len(out) == 0:
out = None
# unref everything now we have refs to all outputs we want
op2.unref_outputs()
logger.debug('success')
return out
# general user entrypoint
def call(name, *args, **kwargs):
return _call_base(name, args, kwargs)
Vips.call = call
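# Example (illustrative): call() invokes any libvips operation by nickname,
# provided the positional arguments match its required inputs, e.g.
#   out = Vips.call('add', left_image, right_image)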
# here from getattr ... try to run the attr as a method
def _call_instance(self, name, args, kwargs):
return _call_base(name, args, kwargs, self)
@classmethod
def vips_image_new_from_file(cls, vips_filename, **kwargs):
"""Create a new Image from a filename.
Extra optional arguments depend on the loader selected by libvips. See each
loader for details.
"""
filename = Vips.filename_get_filename(vips_filename)
option_string = Vips.filename_get_options(vips_filename)
loader = Vips.Foreign.find_load(filename)
    if loader is None:
raise Error('No known loader for "%s".' % filename)
logger.debug('Image.new_from_file: loader = %s' % loader)
return _call_base(loader, [filename], kwargs, None, option_string)
setattr(Vips.Image, 'new_from_file', vips_image_new_from_file)
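# Example (illustrative): load options can be embedded in the filename or
# passed as keyword arguments; both lines below assume a JPEG loader that
# understands the "shrink" option:
#   image = Vips.Image.new_from_file('fred.jpg[shrink=2]')
#   image = Vips.Image.new_from_file('fred.jpg', shrink = 2)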
@classmethod
def vips_image_new_from_buffer(cls, data, option_string, **kwargs):
"""Create a new Image from binary data in a string.
data -- binary image data
option_string -- optional arguments in string form
option_string can be something like "page=10" to load the 10th page of a
tiff file. You can also give load options as keyword arguments.
"""
loader = Vips.Foreign.find_load_buffer(data)
    if loader is None:
raise Error('No known loader for buffer.')
logger.debug('Image.new_from_buffer: loader = %s' % loader)
return _call_base(loader, [data], kwargs, None, option_string)
setattr(Vips.Image, 'new_from_buffer', vips_image_new_from_buffer)
@classmethod
def vips_image_new_from_array(cls, array, scale = 1, offset = 0):
"""Create a new image from an array.
The array argument can be a 1D array to create a height == 1 image, or a 2D
array to make a 2D image. Use scale and offset to set the scale factor,
handy for integer convolutions.
"""
# we accept a 1D array and assume height == 1, or a 2D array and check all
# lines are the same length
if not isinstance(array, list):
raise TypeError('new_from_array() takes a list argument')
if not isinstance(array[0], list):
height = 1
width = len(array)
else:
# must copy the first row, we don't want to modify the passed-in array
flat_array = list(array[0])
height = len(array)
width = len(array[0])
for i in range(1, height):
if len(array[i]) != width:
raise TypeError('new_from_array() array not rectangular')
flat_array += array[i]
array = flat_array
image = cls.new_matrix_from_array(width, height, array)
# be careful to set them as double
image.set('scale', float(scale))
image.set('offset', float(offset))
return image
setattr(Vips.Image, 'new_from_array', vips_image_new_from_array)
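# Example (illustrative): a 3x3 integer convolution mask whose coefficients
# sum to 8, with scale set so they normalize to 1 after division:
#   mask = Vips.Image.new_from_array([[-1, -1, -1],
#                                     [-1, 16, -1],
#                                     [-1, -1, -1]], scale = 8)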
def generate_docstring(name):
try:
op = Vips.Operation.new(name)
except TypeError as e:
raise Error('No such operator.')
if op.get_flags() & Vips.OperationFlags.DEPRECATED:
raise Error('No such operator.', 'operator "%s" is deprecated' % name)
# find all the args for this op, sort into priority order
args = op.get_args()
enm = Vips.ArgumentFlags
# find all required, unassigned input args
required_input = [x for x in args if x.flags & enm.INPUT and
x.flags & enm.REQUIRED and
not x.isset]
optional_input = [x for x in args if x.flags & enm.INPUT and
not x.flags & enm.REQUIRED and
not x.isset]
required_output = [x for x in args if x.flags & enm.OUTPUT and
x.flags & enm.REQUIRED]
optional_output = [x for x in args if x.flags & enm.OUTPUT and
not x.flags & enm.REQUIRED]
# find the first required input image, if any ... we will be a member
# function of this instance
member_x = None
for i in range(0, len(required_input)):
x = required_input[i]
if GObject.type_is_a(vips_type_image, x.prop.value_type):
member_x = x
break
description = op.get_description()
result = description[0].upper() + description[1:] + ".\n\n"
result += "Usage:\n"
result += " " + ", ".join([x.name for x in required_output]) + " = "
if member_x:
result += member_x.name + "." + name + "("
else:
result += "Vips.Image." + name + "("
result += ", ".join([x.name for x in required_input
if x != member_x])
if len(optional_input) > 0:
result += ", "
result += ", ".join([x.name + " = " + x.prop.value_type.name
for x in optional_input])
result += ")\n"
result += "Where:\n"
for x in required_output:
result += " " + x.description() + "\n"
for x in required_input:
result += " " + x.description() + "\n"
if len(optional_input) > 0:
result += "Keyword parameters:\n"
for x in optional_input:
result += " " + x.description() + "\n"
if len(optional_output) > 0:
result += "Extra output options:\n"
for x in optional_output:
result += " " + x.description() + "\n"
return result
# apply a function to a thing, or map over a list
# we often need to do something like (1.0 / other) and need to work for lists
# as well as scalars
def smap(func, x):
if isinstance(x, list):
return list(map(func, x))
else:
return func(x)
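# e.g. smap(lambda x: 1.0 / x, [2.0, 4.0]) == [0.5, 0.25], while
# smap(lambda x: 1.0 / x, 2.0) == 0.5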
# decorator to set docstring
def add_doc(value):
def _doc(func):
func.__doc__ = value
return func
return _doc
class Image(Vips.Image):
# for constructors, see class methods above
# output
def write_to_file(self, vips_filename, **kwargs):
"""Write an Image to a file.
The filename can contain save options, for example
"fred.tif[compression=jpeg]", or save options can be given as keyword
arguments. Save options depend on the selected saver.
"""
filename = Vips.filename_get_filename(vips_filename)
option_string = Vips.filename_get_options(vips_filename)
saver = Vips.Foreign.find_save(filename)
        if saver is None:
raise Error('No known saver for "%s".' % filename)
logger.debug('Image.write_to_file: saver = %s' % saver)
_call_base(saver, [filename], kwargs, self, option_string)
def write_to_buffer(self, format_string, **kwargs):
"""Write an Image to memory.
Return the image as a binary string, encoded in the selected format.
Save options can be given in the format_string, for example
".jpg[Q=90]". Save options depend on the selected saver.
"""
filename = Vips.filename_get_filename(format_string)
option_string = Vips.filename_get_options(format_string)
saver = Vips.Foreign.find_save_buffer(filename)
        if saver is None:
raise Error('No known saver for "%s".' % filename)
logger.debug('Image.write_to_buffer: saver = %s' % saver)
return _call_base(saver, [], kwargs, self, option_string)
# we can use Vips.Image.write_to_memory() directly
# support with in the most trivial way
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
pass
# operator overloads
def __getattr__(self, name):
logger.debug('Image.__getattr__ %s' % name)
# look up in props first, eg. x.props.width
if name in dir(self.props):
return getattr(self.props, name)
@add_doc(generate_docstring(name))
def call_function(*args, **kwargs):
return _call_instance(self, name, args, kwargs)
return call_function
def __add__(self, other):
if isinstance(other, Vips.Image):
return self.add(other)
else:
return self.linear(1, other)
def __radd__(self, other):
return self.__add__(other)
def __sub__(self, other):
if isinstance(other, Vips.Image):
return self.subtract(other)
else:
return self.linear(1, smap(lambda x: -1 * x, other))
def __rsub__(self, other):
return self.linear(-1, other)
def __mul__(self, other):
if isinstance(other, Vips.Image):
return self.multiply(other)
else:
return self.linear(other, 0)
def __rmul__(self, other):
return self.__mul__(other)
# a / const has always been a float in vips, so div and truediv are the
# same
def __div__(self, other):
if isinstance(other, Vips.Image):
return self.divide(other)
else:
return self.linear(smap(lambda x: 1.0 / x, other), 0)
def __rdiv__(self, other):
return (self ** -1) * other
def __truediv__(self, other):
return self.__div__(other)
def __rtruediv__(self, other):
return self.__rdiv__(other)
def __floordiv__(self, other):
if isinstance(other, Vips.Image):
return self.divide(other).floor()
else:
return self.linear(smap(lambda x: 1.0 / x, other), 0).floor()
def __rfloordiv__(self, other):
return ((self ** -1) * other).floor()
def __mod__(self, other):
if isinstance(other, Vips.Image):
return self.remainder(other)
else:
return self.remainder_const(other)
def __pow__(self, other):
if isinstance(other, Vips.Image):
return self.math2(other, Vips.OperationMath2.POW)
else:
return self.math2_const(other, Vips.OperationMath2.POW)
def __rpow__(self, other):
return self.math2_const(other, Vips.OperationMath2.WOP)
def __abs__(self):
return self.abs()
def __lshift__(self, other):
if isinstance(other, Vips.Image):
return self.boolean(other, Vips.OperationBoolean.LSHIFT)
else:
return self.boolean_const(other, Vips.OperationBoolean.LSHIFT)
def __rshift__(self, other):
if isinstance(other, Vips.Image):
return self.boolean(other, Vips.OperationBoolean.RSHIFT)
else:
return self.boolean_const(other, Vips.OperationBoolean.RSHIFT)
def __and__(self, other):
if isinstance(other, Vips.Image):
return self.boolean(other, Vips.OperationBoolean.AND)
else:
return self.boolean_const(other, Vips.OperationBoolean.AND)
def __rand__(self, other):
return self.__and__(other)
def __or__(self, other):
if isinstance(other, Vips.Image):
return self.boolean(other, Vips.OperationBoolean.OR)
else:
return self.boolean_const(other, Vips.OperationBoolean.OR)
def __ror__(self, other):
return self.__or__(other)
def __xor__(self, other):
if isinstance(other, Vips.Image):
return self.boolean(other, Vips.OperationBoolean.EOR)
else:
return self.boolean_const(other, Vips.OperationBoolean.EOR)
def __rxor__(self, other):
return self.__xor__(other)
def __neg__(self):
return -1 * self
def __pos__(self):
return self
def __invert__(self):
return self ^ -1
def __gt__(self, other):
if isinstance(other, Vips.Image):
return self.relational(other, Vips.OperationRelational.MORE)
else:
return self.relational_const(other, Vips.OperationRelational.MORE)
def __ge__(self, other):
if isinstance(other, Vips.Image):
return self.relational(other, Vips.OperationRelational.MOREEQ)
else:
return self.relational_const(other, Vips.OperationRelational.MOREEQ)
def __lt__(self, other):
if isinstance(other, Vips.Image):
return self.relational(other, Vips.OperationRelational.LESS)
else:
return self.relational_const(other, Vips.OperationRelational.LESS)
def __le__(self, other):
if isinstance(other, Vips.Image):
return self.relational(other, Vips.OperationRelational.LESSEQ)
else:
return self.relational_const(other, Vips.OperationRelational.LESSEQ)
def __eq__(self, other):
if isinstance(other, Vips.Image):
return self.relational(other, Vips.OperationRelational.EQUAL)
else:
return self.relational_const(other, Vips.OperationRelational.EQUAL)
def __ne__(self, other):
if isinstance(other, Vips.Image):
return self.relational(other, Vips.OperationRelational.NOTEQ)
else:
return self.relational_const(other, Vips.OperationRelational.NOTEQ)
def __getitem__(self, arg):
        if isinstance(arg, slice):
            i = 0
            if arg.start is not None:
                i = arg.start
            if i < 0:
                # normalize a negative start before computing the band count,
                # so that slices like image[-2:] select the right bands
                i = self.bands + i
            n = self.bands - i
            if arg.stop is not None:
                if arg.stop < 0:
                    n = self.bands + arg.stop - i
                else:
                    n = arg.stop - i
elif isinstance(arg, int):
i = arg
n = 1
else:
raise TypeError
if i < 0:
i = self.bands + i
if i < 0 or i >= self.bands:
raise IndexError
return self.extract_band(i, n = n)
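    # e.g. image[0] extracts the first band, image[0:3] the first three
    # bands and image[-1] the last band, following Python slice semantics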
def __call__(self, x, y):
return self.getpoint(x, y)
# the cast operators int(), long() and float() must return numeric types,
# so we can't define them for images
# a few useful things
def get_value(self, field):
"""Get a named item from an Image.
Fetch an item of metadata and convert it to a Python-friendly format.
For example, VipsBlob values will be converted to bytes().
"""
value = self.get(field)
logger.debug('read out %s from %s' % (value, self))
return unpack(value)
def set_value(self, field, value):
"""Set a named item on an Image.
Values are converted from Python types to something libvips can swallow.
For example, bytes() can be used to set VipsBlob fields.
"""
gtype = self.get_typeof(field)
logger.debug('assigning %s to %s' % (value, self))
logger.debug('%s needs a %s' % (self, gtype))
# blob-ize
if GObject.type_is_a(gtype, vips_type_blob):
if not isinstance(value, Vips.Blob):
value = Vips.Blob.new(None, value)
# image-ize
if GObject.type_is_a(gtype, vips_type_image):
if not isinstance(value, Vips.Image):
value = imageize(self, value)
# array-ize some types, if necessary
value = arrayize(gtype, value)
self.set(field, value)
def floor(self):
"""Return the largest integral value not greater than the argument."""
return self.round(Vips.OperationRound.FLOOR)
def ceil(self):
"""Return the smallest integral value not less than the argument."""
return self.round(Vips.OperationRound.CEIL)
def rint(self):
"""Return the nearest integral value."""
return self.round(Vips.OperationRound.RINT)
def bandand(self):
"""AND image bands together."""
return self.bandbool(Vips.OperationBoolean.AND)
def bandor(self):
"""OR image bands together."""
return self.bandbool(Vips.OperationBoolean.OR)
def bandeor(self):
"""EOR image bands together."""
return self.bandbool(Vips.OperationBoolean.EOR)
def bandsplit(self):
"""Split an n-band image into n separate images."""
return [x for x in self]
def bandjoin(self, other):
"""Join a set of images bandwise."""
if not isinstance(other, list):
other = [other]
return Vips.Image.bandjoin([self] + other)
def maxpos(self):
"""Return the coordinates of the image maximum."""
v, opts = self.max(x = True, y = True)
x = opts['x']
y = opts['y']
return v, x, y
def minpos(self):
"""Return the coordinates of the image minimum."""
v, opts = self.min(x = True, y = True)
x = opts['x']
y = opts['y']
return v, x, y
def real(self):
"""Return the real part of a complex image."""
return self.complexget(Vips.OperationComplexget.REAL)
def imag(self):
"""Return the imaginary part of a complex image."""
return self.complexget(Vips.OperationComplexget.IMAG)
def polar(self):
"""Return an image converted to polar coordinates."""
return self.complex(Vips.OperationComplex.POLAR)
def rect(self):
"""Return an image converted to rectangular coordinates."""
return self.complex(Vips.OperationComplex.RECT)
def conj(self):
"""Return the complex conjugate of an image."""
return self.complex(Vips.OperationComplex.CONJ)
def sin(self):
"""Return the sine of an image in degrees."""
return self.math(Vips.OperationMath.SIN)
def cos(self):
"""Return the cosine of an image in degrees."""
return self.math(Vips.OperationMath.COS)
def tan(self):
"""Return the tangent of an image in degrees."""
return self.math(Vips.OperationMath.TAN)
def asin(self):
"""Return the inverse sine of an image in degrees."""
return self.math(Vips.OperationMath.ASIN)
def acos(self):
"""Return the inverse cosine of an image in degrees."""
return self.math(Vips.OperationMath.ACOS)
def atan(self):
"""Return the inverse tangent of an image in degrees."""
return self.math(Vips.OperationMath.ATAN)
def log(self):
"""Return the natural log of an image."""
return self.math(Vips.OperationMath.LOG)
def log10(self):
"""Return the log base 10 of an image."""
return self.math(Vips.OperationMath.LOG10)
def exp(self):
"""Return e ** pixel."""
return self.math(Vips.OperationMath.EXP)
def exp10(self):
"""Return 10 ** pixel."""
return self.math(Vips.OperationMath.EXP10)
def erode(self, mask):
"""Erode with a structuring element."""
return self.morph(mask, Vips.OperationMorphology.ERODE)
def dilate(self, mask):
"""Dilate with a structuring element."""
return self.morph(mask, Vips.OperationMorphology.DILATE)
def median(self, size):
"""size x size median filter."""
        return self.rank(size, size, (size * size) // 2)
def fliphor(self):
"""Flip horizontally."""
return self.flip(Vips.Direction.HORIZONTAL)
def flipver(self):
"""Flip vertically."""
return self.flip(Vips.Direction.VERTICAL)
def rot90(self):
"""Rotate 90 degrees clockwise."""
return self.rot(Vips.Angle.D90)
def rot180(self):
"""Rotate 180 degrees."""
return self.rot(Vips.Angle.D180)
def rot270(self):
"""Rotate 270 degrees clockwise."""
return self.rot(Vips.Angle.D270)
# we need different imageize rules for this operator ... we need to
# imageize th and el to match each other first
@add_doc(generate_docstring("ifthenelse"))
def ifthenelse(self, th, el, **kwargs):
for match_image in [th, el, self]:
if isinstance(match_image, Vips.Image):
break
if not isinstance(th, Vips.Image):
th = imageize(match_image, th)
if not isinstance(el, Vips.Image):
el = imageize(match_image, el)
return _call_base("ifthenelse", [th, el], kwargs, self)
# add operators which needs to be class methods
# use find_class_methods.py to generate this list
class_methods = [
"system",
"sum",
"bandjoin",
"bandrank",
"black",
"gaussnoise",
"text",
"xyz",
"gaussmat",
"logmat",
"eye",
"grey",
"zone",
"sines",
"mask_ideal",
"mask_ideal_ring",
"mask_ideal_band",
"mask_butterworth",
"mask_butterworth_ring",
"mask_butterworth_band",
"mask_gaussian",
"mask_gaussian_ring",
"mask_gaussian_band",
"mask_fractal",
"tonelut",
"identity",
"fractsurf",
"radload",
"ppmload",
"csvload",
"matrixload",
"analyzeload",
"rawload",
"vipsload",
"pngload",
"pngload_buffer",
"matload",
"jpegload",
"jpegload_buffer",
"webpload",
"webpload_buffer",
"tiffload",
"tiffload_buffer",
"openslideload",
"magickload",
"magickload_buffer",
"fitsload",
"openexrload"]
def generate_class_method(name):
@classmethod
@add_doc(generate_docstring(name))
def class_method(cls, *args, **kwargs):
return _call_base(name, args, kwargs)
return class_method
for nickname in class_methods:
logger.debug('adding %s as a class method' % nickname)
# some may be missing in this vips, eg. we might not have "webpload"
try:
method = generate_class_method(nickname)
setattr(Vips.Image, nickname, method)
except Error:
pass
Image = override(Image)
__all__.append('Image')
|
Zhongqilong/mykbengineer | refs/heads/master | kbe/src/lib/python/Lib/test/test_poplib.py | 72 | """Test script for poplib module."""
# Modified by Giampaolo Rodola' to give poplib.POP3 and poplib.POP3_SSL
# a real test suite
import poplib
import asyncore
import asynchat
import socket
import os
import time
import errno
from unittest import TestCase, skipUnless
from test import support as test_support
threading = test_support.import_module('threading')
HOST = test_support.HOST
PORT = 0
SUPPORTS_SSL = False
if hasattr(poplib, 'POP3_SSL'):
import ssl
from ssl import HAS_SNI
SUPPORTS_SSL = True
CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "keycert3.pem")
CAFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "pycacert.pem")
else:
HAS_SNI = False
requires_ssl = skipUnless(SUPPORTS_SSL, 'SSL not supported')
# the dummy data returned by server when LIST and RETR commands are issued
LIST_RESP = b'1 1\r\n2 2\r\n3 3\r\n4 4\r\n5 5\r\n.\r\n'
RETR_RESP = b"""From: postmaster@python.org\
\r\nContent-Type: text/plain\r\n\
MIME-Version: 1.0\r\n\
Subject: Dummy\r\n\
\r\n\
line1\r\n\
line2\r\n\
line3\r\n\
.\r\n"""
class DummyPOP3Handler(asynchat.async_chat):
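    # A minimal in-memory POP3 server: it implements just enough of the
    # protocol for this module's tests, answering each command with canned
    # responses rather than consulting any real mailbox state.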
CAPAS = {'UIDL': [], 'IMPLEMENTATION': ['python-testlib-pop-server']}
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
self.tls_active = False
self.tls_starting = False
def collect_incoming_data(self, data):
self.in_buffer.append(data)
def found_terminator(self):
line = b''.join(self.in_buffer)
line = str(line, 'ISO-8859-1')
self.in_buffer = []
cmd = line.split(' ')[0].lower()
space = line.find(' ')
if space != -1:
arg = line[space + 1:]
else:
arg = ""
if hasattr(self, 'cmd_' + cmd):
method = getattr(self, 'cmd_' + cmd)
method(arg)
else:
            self.push('-ERR unrecognized POP3 command "%s".' % cmd)
def handle_error(self):
raise
def push(self, data):
asynchat.async_chat.push(self, data.encode("ISO-8859-1") + b'\r\n')
def cmd_echo(self, arg):
# sends back the received string (used by the test suite)
self.push(arg)
def cmd_user(self, arg):
if arg != "guido":
self.push("-ERR no such user")
self.push('+OK password required')
def cmd_pass(self, arg):
if arg != "python":
self.push("-ERR wrong password")
self.push('+OK 10 messages')
def cmd_stat(self, arg):
self.push('+OK 10 100')
def cmd_list(self, arg):
if arg:
self.push('+OK %s %s' % (arg, arg))
else:
self.push('+OK')
asynchat.async_chat.push(self, LIST_RESP)
cmd_uidl = cmd_list
def cmd_retr(self, arg):
        self.push('+OK %s bytes' % len(RETR_RESP))
asynchat.async_chat.push(self, RETR_RESP)
cmd_top = cmd_retr
def cmd_dele(self, arg):
self.push('+OK message marked for deletion.')
def cmd_noop(self, arg):
self.push('+OK done nothing.')
def cmd_rpop(self, arg):
self.push('+OK done nothing.')
def cmd_apop(self, arg):
self.push('+OK done nothing.')
def cmd_quit(self, arg):
self.push('+OK closing.')
self.close_when_done()
def _get_capas(self):
_capas = dict(self.CAPAS)
if not self.tls_active and SUPPORTS_SSL:
_capas['STLS'] = []
return _capas
def cmd_capa(self, arg):
self.push('+OK Capability list follows')
if self._get_capas():
for cap, params in self._get_capas().items():
_ln = [cap]
if params:
_ln.extend(params)
self.push(' '.join(_ln))
self.push('.')
if SUPPORTS_SSL:
def cmd_stls(self, arg):
if self.tls_active is False:
self.push('+OK Begin TLS negotiation')
tls_sock = ssl.wrap_socket(self.socket, certfile=CERTFILE,
server_side=True,
do_handshake_on_connect=False,
suppress_ragged_eofs=False)
self.del_channel()
self.set_socket(tls_sock)
self.tls_active = True
self.tls_starting = True
self.in_buffer = []
self._do_tls_handshake()
else:
self.push('-ERR Command not permitted when TLS active')
def _do_tls_handshake(self):
try:
self.socket.do_handshake()
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
elif err.args[0] == ssl.SSL_ERROR_EOF:
return self.handle_close()
raise
except OSError as err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
self.tls_active = True
self.tls_starting = False
def handle_read(self):
if self.tls_starting:
self._do_tls_handshake()
else:
try:
asynchat.async_chat.handle_read(self)
except ssl.SSLEOFError:
self.handle_close()
class DummyPOP3Server(asyncore.dispatcher, threading.Thread):
handler = DummyPOP3Handler
def __init__(self, address, af=socket.AF_INET):
threading.Thread.__init__(self)
asyncore.dispatcher.__init__(self)
self.create_socket(af, socket.SOCK_STREAM)
self.bind(address)
self.listen(5)
self.active = False
self.active_lock = threading.Lock()
self.host, self.port = self.socket.getsockname()[:2]
self.handler_instance = None
def start(self):
assert not self.active
self.__flag = threading.Event()
threading.Thread.start(self)
self.__flag.wait()
def run(self):
self.active = True
self.__flag.set()
while self.active and asyncore.socket_map:
self.active_lock.acquire()
asyncore.loop(timeout=0.1, count=1)
self.active_lock.release()
asyncore.close_all(ignore_all=True)
def stop(self):
assert self.active
self.active = False
self.join()
def handle_accepted(self, conn, addr):
self.handler_instance = self.handler(conn)
def handle_connect(self):
self.close()
handle_read = handle_connect
def writable(self):
return 0
def handle_error(self):
raise
class TestPOP3Class(TestCase):
def assertOK(self, resp):
self.assertTrue(resp.startswith(b"+OK"))
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.start()
self.client = poplib.POP3(self.server.host, self.server.port, timeout=3)
def tearDown(self):
self.client.close()
self.server.stop()
def test_getwelcome(self):
self.assertEqual(self.client.getwelcome(),
b'+OK dummy pop3 server ready. <timestamp>')
def test_exceptions(self):
self.assertRaises(poplib.error_proto, self.client._shortcmd, 'echo -err')
def test_user(self):
self.assertOK(self.client.user('guido'))
self.assertRaises(poplib.error_proto, self.client.user, 'invalid')
def test_pass_(self):
self.assertOK(self.client.pass_('python'))
self.assertRaises(poplib.error_proto, self.client.user, 'invalid')
def test_stat(self):
self.assertEqual(self.client.stat(), (10, 100))
def test_list(self):
self.assertEqual(self.client.list()[1:],
([b'1 1', b'2 2', b'3 3', b'4 4', b'5 5'],
25))
self.assertTrue(self.client.list('1').endswith(b"OK 1 1"))
def test_retr(self):
expected = (b'+OK 116 bytes',
[b'From: postmaster@python.org', b'Content-Type: text/plain',
b'MIME-Version: 1.0', b'Subject: Dummy',
b'', b'line1', b'line2', b'line3'],
113)
foo = self.client.retr('foo')
self.assertEqual(foo, expected)
def test_too_long_lines(self):
self.assertRaises(poplib.error_proto, self.client._shortcmd,
'echo +%s' % ((poplib._MAXLINE + 10) * 'a'))
def test_dele(self):
self.assertOK(self.client.dele('foo'))
def test_noop(self):
self.assertOK(self.client.noop())
def test_rpop(self):
self.assertOK(self.client.rpop('foo'))
def test_apop(self):
self.assertOK(self.client.apop('foo', 'dummypassword'))
def test_top(self):
expected = (b'+OK 116 bytes',
[b'From: postmaster@python.org', b'Content-Type: text/plain',
b'MIME-Version: 1.0', b'Subject: Dummy', b'',
b'line1', b'line2', b'line3'],
113)
self.assertEqual(self.client.top(1, 1), expected)
def test_uidl(self):
self.client.uidl()
self.client.uidl('foo')
def test_capa(self):
capa = self.client.capa()
self.assertTrue('IMPLEMENTATION' in capa.keys())
def test_quit(self):
resp = self.client.quit()
self.assertTrue(resp)
self.assertIsNone(self.client.sock)
self.assertIsNone(self.client.file)
@requires_ssl
def test_stls_capa(self):
capa = self.client.capa()
self.assertTrue('STLS' in capa.keys())
@requires_ssl
def test_stls(self):
expected = b'+OK Begin TLS negotiation'
resp = self.client.stls()
self.assertEqual(resp, expected)
@requires_ssl
@skipUnless(HAS_SNI, 'No SNI support in ssl module')
def test_stls_context(self):
expected = b'+OK Begin TLS negotiation'
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.load_verify_locations(CAFILE)
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.check_hostname = True
with self.assertRaises(ssl.CertificateError):
resp = self.client.stls(context=ctx)
self.client = poplib.POP3("localhost", self.server.port, timeout=3)
resp = self.client.stls(context=ctx)
self.assertEqual(resp, expected)
if SUPPORTS_SSL:
class DummyPOP3_SSLHandler(DummyPOP3Handler):
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
ssl_socket = ssl.wrap_socket(self.socket, certfile=CERTFILE,
server_side=True,
do_handshake_on_connect=False)
self.del_channel()
self.set_socket(ssl_socket)
# Must try handshake before calling push()
self.tls_active = True
self.tls_starting = True
self._do_tls_handshake()
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
@requires_ssl
class TestPOP3_SSLClass(TestPOP3Class):
# repeat previous tests by using poplib.POP3_SSL
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.handler = DummyPOP3_SSLHandler
self.server.start()
self.client = poplib.POP3_SSL(self.server.host, self.server.port)
def test__all__(self):
self.assertIn('POP3_SSL', poplib.__all__)
def test_context(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, keyfile=CERTFILE, context=ctx)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, certfile=CERTFILE, context=ctx)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, keyfile=CERTFILE,
certfile=CERTFILE, context=ctx)
self.client.quit()
self.client = poplib.POP3_SSL(self.server.host, self.server.port,
context=ctx)
self.assertIsInstance(self.client.sock, ssl.SSLSocket)
self.assertIs(self.client.sock.context, ctx)
self.assertTrue(self.client.noop().startswith(b'+OK'))
def test_stls(self):
self.assertRaises(poplib.error_proto, self.client.stls)
test_stls_context = test_stls
def test_stls_capa(self):
capa = self.client.capa()
self.assertFalse('STLS' in capa.keys())
@requires_ssl
class TestPOP3_TLSClass(TestPOP3Class):
# repeat previous tests by using poplib.POP3.stls()
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.start()
self.client = poplib.POP3(self.server.host, self.server.port, timeout=3)
self.client.stls()
def tearDown(self):
if self.client.file is not None and self.client.sock is not None:
try:
self.client.quit()
except poplib.error_proto:
# happens in the test_too_long_lines case; the overlong
# response will be treated as response to QUIT and raise
# this exception
self.client.close()
self.server.stop()
def test_stls(self):
self.assertRaises(poplib.error_proto, self.client.stls)
test_stls_context = test_stls
def test_stls_capa(self):
capa = self.client.capa()
self.assertFalse(b'STLS' in capa.keys())
class TestTimeouts(TestCase):
def setUp(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(60) # Safety net. Look issue 11812
self.port = test_support.bind_port(self.sock)
self.thread = threading.Thread(target=self.server, args=(self.evt,self.sock))
self.thread.setDaemon(True)
self.thread.start()
self.evt.wait()
def tearDown(self):
self.thread.join()
del self.thread # Clear out any dangling Thread objects.
def server(self, evt, serv):
serv.listen(5)
evt.set()
try:
conn, addr = serv.accept()
conn.send(b"+ Hola mundo\n")
conn.close()
except socket.timeout:
pass
finally:
serv.close()
def testTimeoutDefault(self):
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
pop = poplib.POP3(HOST, self.port)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(pop.sock.gettimeout(), 30)
pop.sock.close()
def testTimeoutNone(self):
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
pop = poplib.POP3(HOST, self.port, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertIsNone(pop.sock.gettimeout())
pop.sock.close()
def testTimeoutValue(self):
pop = poplib.POP3(HOST, self.port, timeout=30)
self.assertEqual(pop.sock.gettimeout(), 30)
pop.sock.close()
def test_main():
tests = [TestPOP3Class, TestTimeouts,
TestPOP3_SSLClass, TestPOP3_TLSClass]
thread_info = test_support.threading_setup()
try:
test_support.run_unittest(*tests)
finally:
test_support.threading_cleanup(*thread_info)
if __name__ == '__main__':
test_main()
|
PriviPK/privipk-sync-engine | refs/heads/privipk | inbox/events/remote_sync.py | 1 | from datetime import datetime
from inbox.log import get_logger
logger = get_logger()
from inbox.basicauth import AccessNotEnabledError
from inbox.sync.base_sync import BaseSyncMonitor
from inbox.models import Event, Account, Calendar
from inbox.models.event import RecurringEvent, RecurringEventOverride
from inbox.util.debug import bind_context
from inbox.models.session import session_scope
from inbox.events.recurring import link_events
from inbox.events.google import GoogleEventsProvider
EVENT_SYNC_FOLDER_ID = -2
EVENT_SYNC_FOLDER_NAME = 'Events'
class EventSync(BaseSyncMonitor):
"""Per-account event sync engine."""
def __init__(self, email_address, provider_name, account_id, namespace_id,
poll_frequency=300):
bind_context(self, 'eventsync', account_id)
# Only Google for now, can easily parametrize by provider later.
self.provider = GoogleEventsProvider(account_id, namespace_id)
BaseSyncMonitor.__init__(self,
account_id,
namespace_id,
email_address,
EVENT_SYNC_FOLDER_ID,
EVENT_SYNC_FOLDER_NAME,
provider_name,
poll_frequency=poll_frequency)
def sync(self):
"""Query a remote provider for updates and persist them to the
database. This function runs every `self.poll_frequency`.
"""
self.log.info('syncing events')
# Get a timestamp before polling, so that we don't subsequently miss
# remote updates that happen while the poll loop is executing.
sync_timestamp = datetime.utcnow()
with session_scope() as db_session:
account = db_session.query(Account).get(self.account_id)
last_sync = account.last_synced_events
try:
deleted_uids, calendar_changes = self.provider.sync_calendars()
except AccessNotEnabledError:
self.log.warning(
'Access to provider calendar API not enabled; bypassing sync')
return
with session_scope() as db_session:
handle_calendar_deletes(self.namespace_id, deleted_uids,
self.log, db_session)
calendar_uids_and_ids = handle_calendar_updates(self.namespace_id,
calendar_changes,
self.log,
db_session)
db_session.commit()
for (uid, id_) in calendar_uids_and_ids:
event_changes = self.provider.sync_events(
uid, sync_from_time=last_sync)
with session_scope() as db_session:
handle_event_updates(self.namespace_id, id_, event_changes,
self.log, db_session)
db_session.commit()
with session_scope() as db_session:
account = db_session.query(Account).get(self.account_id)
account.last_synced_events = sync_timestamp
db_session.commit()
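# Example (illustrative): one EventSync instance runs per account; the
# start()/monitoring machinery is assumed to be provided by BaseSyncMonitor:
#   monitor = EventSync('user@example.com', 'gmail', account_id, namespace_id)
#   monitor.start()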
def handle_calendar_deletes(namespace_id, deleted_calendar_uids, log,
db_session):
"""Delete any local Calendar rows with uid in `deleted_calendar_uids`. This
delete cascades to associated events (if the calendar is gone, so are all
of its events)."""
deleted_count = 0
for uid in deleted_calendar_uids:
local_calendar = db_session.query(Calendar).filter(
Calendar.namespace_id == namespace_id,
Calendar.uid == uid).first()
if local_calendar is not None:
# Cascades to associated events via SQLAlchemy 'delete' cascade
db_session.delete(local_calendar)
deleted_count += 1
log.info('deleted calendars', deleted=deleted_count)
def handle_calendar_updates(namespace_id, calendars, log, db_session):
"""Persists new or updated Calendar objects to the database."""
ids_ = []
added_count = 0
updated_count = 0
for calendar in calendars:
assert calendar.uid is not None, 'Got remote item with null uid'
local_calendar = db_session.query(Calendar).filter(
Calendar.namespace_id == namespace_id,
Calendar.uid == calendar.uid).first()
if local_calendar is not None:
local_calendar.update(calendar)
updated_count += 1
else:
local_calendar = Calendar(namespace_id=namespace_id)
local_calendar.update(calendar)
db_session.add(local_calendar)
db_session.flush()
added_count += 1
ids_.append((local_calendar.uid, local_calendar.id))
log.info('synced added and updated calendars', added=added_count,
updated=updated_count)
return ids_
def handle_event_updates(namespace_id, calendar_id, events, log, db_session):
"""Persists new or updated Event objects to the database."""
added_count = 0
updated_count = 0
for event in events:
assert event.uid is not None, 'Got remote item with null uid'
# Note: we could bulk-load previously existing events instead of
# loading them one-by-one. This would make the first sync faster, and
# probably not really affect anything else.
local_event = db_session.query(Event).filter(
Event.namespace_id == namespace_id,
Event.calendar_id == calendar_id,
Event.uid == event.uid).first()
if local_event is not None:
# We also need to mark all overrides as cancelled if we're
# cancelling a recurring event. However, note the original event
# may not itself be recurring (recurrence may have been added).
if isinstance(local_event, RecurringEvent) and \
event.status == 'cancelled' and \
local_event.status != 'cancelled':
for override in local_event.overrides:
override.status = 'cancelled'
local_event.update(event)
updated_count += 1
else:
local_event = event
local_event.namespace_id = namespace_id
local_event.calendar_id = calendar_id
db_session.add(local_event)
added_count += 1
# If we just updated/added a recurring event or override, make sure
# we link it to the right master event.
if isinstance(event, RecurringEvent) or \
isinstance(event, RecurringEventOverride):
db_session.flush()
link_events(db_session, event)
log.info('synced added and updated events',
calendar_id=calendar_id,
added=added_count,
updated=updated_count)
|
memento7/KINCluster | refs/heads/master | tests/test_extractor.py | 1 | # -*- coding: utf-8 -*-
"""
tests.cluster
---------------
Test cluster of KINCluster
:author: MaybeS(maytryark@gmail.com)
"""
import pytest
from KINCluster.core.extractor import Extractor, extractable
from KINCluster.core.cluster import Cluster
from KINCluster.core.pipeline import Pipeline
from KINCluster.core.item import Item
from KINCluster.lib.tokenizer import tokenize, stemize
import codecs
test_text = ['2016ํ๋1.txt', '2014ํ๋1.txt']
test_keyword = ['ํ๋ฒํ๊ฒฐ๋ฌธ', 'ํ๋ฒํ๊ฒฐ๋ฌธ']
class Pipeline(Pipeline):
def capture_item(self):
for text, keyword in zip(test_text, test_keyword):
with codecs.open('tests/data/' + text, 'r', 'utf-8') as f:
content = f.read()
yield Item(title=text,content=content,keyword=keyword,date='')
def test_extractor1():
cluster = Cluster(epoch=32, tokenizer="tokenize")
pipeline = Pipeline()
for item in pipeline.capture_item():
cluster.put_item(item)
cluster.cluster()
extractor = Extractor(cluster)
for idx, dump in enumerate(cluster.dumps):
items, vectors, counter = map(list, zip(*dump))
assert set(['items', 'vectors', 'counter', 'center', 'keywords']) == set(extractable.s.keys())
extracted = extractor.dump(idx)
assert isinstance(extracted, Item)
assert isinstance(extracted.keywords, list)
assert 32 == len(extracted.keywords) |
Yen-Chung-En/2015cdb_g1 | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/site.py | 805 | import sys
|
Communities-Communications/cc-odoo | refs/heads/master | openerp/modules/graph.py | 260 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2014 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" Modules dependency graph. """
import os, sys, imp
from os.path import join as opj
import itertools
import zipimport
import openerp
import openerp.osv as osv
import openerp.tools as tools
import openerp.tools.osutil as osutil
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
import zipfile
import openerp.release as release
import re
import base64
from zipfile import PyZipFile, ZIP_DEFLATED
from cStringIO import StringIO
import logging
_logger = logging.getLogger(__name__)
class Graph(dict):
""" Modules dependency graph.
The graph is a mapping from module name to Nodes.
"""
def add_node(self, name, info):
max_depth, father = 0, None
for d in info['depends']:
n = self.get(d) or Node(d, self, None) # lazy creation, do not use default value for get()
if n.depth >= max_depth:
father = n
max_depth = n.depth
if father:
return father.add_child(name, info)
else:
return Node(name, self, info)
def update_from_db(self, cr):
if not len(self):
return
# update the graph with values from the database (if exist)
## First, we set the default values for each package in graph
additional_data = dict((key, {'id': 0, 'state': 'uninstalled', 'dbdemo': False, 'installed_version': None}) for key in self.keys())
## Then we get the values from the database
cr.execute('SELECT name, id, state, demo AS dbdemo, latest_version AS installed_version'
' FROM ir_module_module'
' WHERE name IN %s',(tuple(additional_data),)
)
## and we update the default values with values from the database
additional_data.update((x['name'], x) for x in cr.dictfetchall())
for package in self.values():
for k, v in additional_data[package.name].items():
setattr(package, k, v)
def add_module(self, cr, module, force=None):
self.add_modules(cr, [module], force)
def add_modules(self, cr, module_list, force=None):
if force is None:
force = []
packages = []
len_graph = len(self)
for module in module_list:
# This will raise an exception if no/unreadable descriptor file.
# NOTE The call to load_information_from_description_file is already
# done by db.initialize, so it is possible to not do it again here.
info = openerp.modules.module.load_information_from_description_file(module)
if info and info['installable']:
packages.append((module, info)) # TODO directly a dict, like in get_modules_with_version
else:
_logger.warning('module %s: not installable, skipped', module)
dependencies = dict([(p, info['depends']) for p, info in packages])
current, later = set([p for p, info in packages]), set()
while packages and current > later:
package, info = packages[0]
deps = info['depends']
# if all dependencies of 'package' are already in the graph, add 'package' in the graph
            if all(dep in self for dep in deps):
if not package in current:
packages.pop(0)
continue
later.clear()
current.remove(package)
node = self.add_node(package, info)
for kind in ('init', 'demo', 'update'):
if package in tools.config[kind] or 'all' in tools.config[kind] or kind in force:
setattr(node, kind, True)
else:
later.add(package)
packages.append((package, info))
packages.pop(0)
self.update_from_db(cr)
for package in later:
unmet_deps = filter(lambda p: p not in self, dependencies[package])
_logger.error('module %s: Unmet dependencies: %s', package, ', '.join(unmet_deps))
result = len(self) - len_graph
if result != len(module_list):
_logger.warning('Some modules were not loaded.')
return result
def __iter__(self):
level = 0
done = set(self.keys())
while done:
level_modules = sorted((name, module) for name, module in self.items() if module.depth==level)
for name, module in level_modules:
done.remove(name)
yield module
level += 1
def __str__(self):
return '\n'.join(str(n) for n in self if n.depth == 0)
class Node(object):
""" One module in the modules dependency graph.
Node acts as a per-module singleton. A node is constructed via
Graph.add_module() or Graph.add_modules(). Some of its fields are from
ir_module_module (set by Graph.update_from_db()).
"""
def __new__(cls, name, graph, info):
if name in graph:
inst = graph[name]
else:
inst = object.__new__(cls)
graph[name] = inst
return inst
def __init__(self, name, graph, info):
self.name = name
self.graph = graph
self.info = info or getattr(self, 'info', {})
if not hasattr(self, 'children'):
self.children = []
if not hasattr(self, 'depth'):
self.depth = 0
@property
def data(self):
return self.info
def add_child(self, name, info):
node = Node(name, self.graph, info)
node.depth = self.depth + 1
if node not in self.children:
self.children.append(node)
for attr in ('init', 'update', 'demo'):
if hasattr(self, attr):
setattr(node, attr, True)
self.children.sort(lambda x, y: cmp(x.name, y.name))
return node
def __setattr__(self, name, value):
super(Node, self).__setattr__(name, value)
if name in ('init', 'update', 'demo'):
tools.config[name][self.name] = 1
for child in self.children:
setattr(child, name, value)
if name == 'depth':
for child in self.children:
setattr(child, name, value + 1)
def __iter__(self):
return itertools.chain(iter(self.children), *map(iter, self.children))
def __str__(self):
return self._pprint()
def _pprint(self, depth=0):
s = '%s\n' % self.name
for c in self.children:
s += '%s`-> %s' % (' ' * depth, c._pprint(depth+1))
return s
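# Hedged usage sketch (not part of the original module): Graph and Node can
# be exercised without a database, since add_node() only reads the 'depends'
# key of the info dict. The module names below are illustrative assumptions:
#
#     g = Graph()
#     g.add_node('base', {'depends': []})               # root, depth 0
#     g.add_node('web', {'depends': ['base']})          # child, depth 1
#     g.add_node('mail', {'depends': ['base', 'web']})  # deepest parent wins
#     [n.name for n in g]   # level-order traversal: ['base', 'web', 'mail']
#     print g               # pretty-prints the tree from each root node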
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
shaistaansari/django | refs/heads/master | tests/gis_tests/gis_migrations/migrations/0001_initial.py | 46 | from django.db import connection, migrations, models
from ...models import models as gis_models
ops = [
migrations.CreateModel(
name='Neighborhood',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, unique=True)),
('geom', gis_models.MultiPolygonField(srid=4326)),
],
options={
'required_db_features': ['gis_enabled'],
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Household',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('neighborhood', models.ForeignKey(to='gis_migrations.Neighborhood', to_field='id', null=True)),
('address', models.CharField(max_length=100)),
('zip_code', models.IntegerField(null=True, blank=True)),
('geom', gis_models.PointField(srid=4326, geography=True)),
],
options={
'required_db_features': ['gis_enabled'],
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Family',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, unique=True)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='household',
name='family',
field=models.ForeignKey(blank=True, to='gis_migrations.Family', null=True),
preserve_default=True,
)
]
if connection.features.gis_enabled and connection.features.supports_raster:
ops += [
migrations.CreateModel(
name='Heatmap',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, unique=True)),
('rast', gis_models.fields.RasterField(srid=4326)),
],
options={
},
bases=(models.Model,),
),
]
class Migration(migrations.Migration):
"""
Used for gis-specific migration tests.
"""
operations = ops
|
GdZ/scriptfile | refs/heads/master | software/googleAppEngine/lib/django_1_2/django/contrib/auth/tests/views.py | 43 | import os
import re
import urllib
from django.conf import settings
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.sites.models import Site, RequestSite
from django.contrib.auth.models import User
from django.test import TestCase
from django.core import mail
from django.core.urlresolvers import reverse
class AuthViewsTestCase(TestCase):
"""
Helper base class for all the following test cases.
"""
fixtures = ['authtestdata.json']
urls = 'django.contrib.auth.tests.urls'
def setUp(self):
self.old_LANGUAGES = settings.LANGUAGES
self.old_LANGUAGE_CODE = settings.LANGUAGE_CODE
settings.LANGUAGES = (('en', 'English'),)
settings.LANGUAGE_CODE = 'en'
self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
settings.TEMPLATE_DIRS = (
os.path.join(
os.path.dirname(__file__),
'templates'
)
,)
def tearDown(self):
settings.LANGUAGES = self.old_LANGUAGES
settings.LANGUAGE_CODE = self.old_LANGUAGE_CODE
settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
class PasswordResetTest(AuthViewsTestCase):
def test_email_not_found(self):
"Error is raised if the provided email address isn't currently registered"
response = self.client.get('/password_reset/')
self.assertEquals(response.status_code, 200)
response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
self.assertContains(response, "That e-mail address doesn't have an associated user account")
self.assertEquals(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEquals(response.status_code, 302)
self.assertEquals(len(mail.outbox), 1)
self.assert_("http://" in mail.outbox[0].body)
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEquals(response.status_code, 302)
self.assertEquals(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assert_(urlmatch is not None, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# The confirm page should render the new-password form:
self.assertEquals(response.status_code, 200)
self.assert_("Please enter your new password" in response.content)
def test_confirm_invalid(self):
url, path = self._test_confirm_start()
# Let's munge the token in the path, but keep the same length,
# in case the URLconf will reject a different length.
path = path[:-5] + ("0"*4) + path[-1]
response = self.client.get(path)
self.assertEquals(response.status_code, 200)
self.assert_("The password reset link was invalid" in response.content)
def test_confirm_invalid_user(self):
# Ensure that we get a 200 response for a non-existent user, not a 404
response = self.client.get('/reset/123456-1-1/')
self.assertEquals(response.status_code, 200)
self.assert_("The password reset link was invalid" in response.content)
def test_confirm_overflow_user(self):
# Ensure that we get a 200 response for a base36 user id that overflows int
response = self.client.get('/reset/zzzzzzzzzzzzz-1-1/')
self.assertEquals(response.status_code, 200)
self.assert_("The password reset link was invalid" in response.content)
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying
# to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0"*4) + path[-1]
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2':' anewpassword'})
# Check the password has not been changed
u = User.objects.get(email='staffmember@example.com')
self.assert_(not u.check_password("anewpassword"))
def test_confirm_complete(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
# It redirects us to a 'complete' page:
self.assertEquals(response.status_code, 302)
# Check the password has been changed
u = User.objects.get(email='staffmember@example.com')
self.assert_(u.check_password("anewpassword"))
# Check we can't use the link again
response = self.client.get(path)
self.assertEquals(response.status_code, 200)
self.assert_("The password reset link was invalid" in response.content)
def test_confirm_different_passwords(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2':' x'})
self.assertEquals(response.status_code, 200)
self.assert_("The two password fields didn't match" in response.content)
class ChangePasswordTest(AuthViewsTestCase):
def login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password
}
)
self.assertEquals(response.status_code, 302)
self.assert_(response['Location'].endswith(settings.LOGIN_REDIRECT_URL))
def fail_login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password
}
)
self.assertEquals(response.status_code, 200)
self.assert_("Please enter a correct username and password. Note that both fields are case-sensitive." in response.content)
def logout(self):
response = self.client.get('/logout/')
def test_password_change_fails_with_invalid_old_password(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'donuts',
'new_password1': 'password1',
'new_password2': 'password1',
}
)
self.assertEquals(response.status_code, 200)
self.assert_("Your old password was entered incorrectly. Please enter it again." in response.content)
def test_password_change_fails_with_mismatched_passwords(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'donuts',
}
)
self.assertEquals(response.status_code, 200)
self.assert_("The two password fields didn't match." in response.content)
def test_password_change_succeeds(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
}
)
self.assertEquals(response.status_code, 302)
self.assert_(response['Location'].endswith('/password_change/done/'))
self.fail_login()
self.login(password='password1')
class LoginTest(AuthViewsTestCase):
def test_current_site_in_context_after_login(self):
response = self.client.get(reverse('django.contrib.auth.views.login'))
self.assertEquals(response.status_code, 200)
if Site._meta.installed:
site = Site.objects.get_current()
self.assertEquals(response.context['site'], site)
self.assertEquals(response.context['site_name'], site.name)
else:
self.assertTrue(isinstance(response.context['site'], RequestSite))
self.assert_(isinstance(response.context['form'], AuthenticationForm),
'Login form is not an AuthenticationForm')
def test_security_check(self, password='password'):
login_url = reverse('django.contrib.auth.views.login')
# These URLs should not pass the security check
for bad_url in ('http://example.com',
'https://example.com',
'ftp://example.com',
'//example.com'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urllib.quote(bad_url)
}
response = self.client.post(nasty_url, {
'username': 'testclient',
'password': password,
}
)
self.assertEquals(response.status_code, 302)
self.assertFalse(bad_url in response['Location'], "%s should be blocked" % bad_url)
# Now, these URLs have another URL as a GET parameter and therefore
# should be allowed
for url_ in ('http://example.com', 'https://example.com',
'ftp://example.com', '//example.com'):
safe_url = '%(url)s?%(next)s=/view/?param=%(safe_param)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'safe_param': urllib.quote(url_)
}
response = self.client.post(safe_url, {
'username': 'testclient',
'password': password,
}
)
self.assertEquals(response.status_code, 302)
self.assertTrue('/view/?param=%s' % url_ in response['Location'], "/view/?param=%s should be allowed" % url_)
class LogoutTest(AuthViewsTestCase):
urls = 'django.contrib.auth.tests.urls'
def login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password
}
)
self.assertEquals(response.status_code, 302)
self.assert_(response['Location'].endswith(settings.LOGIN_REDIRECT_URL))
self.assert_(SESSION_KEY in self.client.session)
def confirm_logged_out(self):
self.assert_(SESSION_KEY not in self.client.session)
def test_logout_default(self):
"Logout without next_page option renders the default template"
self.login()
response = self.client.get('/logout/')
self.assertEquals(200, response.status_code)
self.assert_('Logged out' in response.content)
self.confirm_logged_out()
def test_14377(self):
# Regression test for ticket #14377: 'site' must be present in the logout context.
self.login()
response = self.client.get('/logout/')
self.assertTrue('site' in response.context)
def test_logout_with_next_page_specified(self):
"Logout with next_page option given redirects to specified resource"
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assert_(response['Location'].endswith('/somewhere/'))
self.confirm_logged_out()
def test_logout_with_redirect_argument(self):
"Logout with query string redirects to specified resource"
self.login()
response = self.client.get('/logout/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assert_(response['Location'].endswith('/login/'))
self.confirm_logged_out()
def test_logout_with_custom_redirect_argument(self):
"Logout with custom query string redirects to specified resource"
self.login()
response = self.client.get('/logout/custom_query/?follow=/somewhere/')
self.assertEqual(response.status_code, 302)
self.assert_(response['Location'].endswith('/somewhere/'))
self.confirm_logged_out()
|
mx3L/enigma2 | refs/heads/master | tests/test_timer.py | 56 | import enigma
import sys
import time
import tests
#enigma.reset()
def test_timer(repeat = 0, timer_start = 3600, timer_length = 1000, sim_length = 86400 * 7):
import NavigationInstance
at = time.time()
t = NavigationInstance.instance.RecordTimer
print t
print "old mwt:", t.MaxWaitTime
t.MaxWaitTime = 86400 * 1000
# hack:
NavigationInstance.instance.SleepTimer.MaxWaitTime = 86400 * 1000
t.processed_timers = [ ]
t.timer_list = [ ]
# generate a timer to test
import xml.etree.cElementTree
import RecordTimer
timer = RecordTimer.createTimer(xml.etree.cElementTree.fromstring(
"""
<timer
begin="%d"
end="%d"
serviceref="1:0:1:6DD2:44D:1:C00000:0:0:0:"
repeated="%d"
name="Test Event Name"
description="Test Event Description"
afterevent="nothing"
eit="56422"
disabled="0"
justplay="0">
</timer>""" % (at + timer_start, at + timer_start + timer_length, repeat)
))
t.record(timer)
# run virtual environment
enigma.run(sim_length)
print "done."
timers = t.processed_timers + t.timer_list
print "start: %s" % (time.ctime(at + 10))
assert len(timers) == 1
for t in timers:
print "begin=%d, end=%d, repeated=%d, state=%d" % (t.begin - at, t.end - at, t.repeated, t.state)
print "begin: %s" % (time.ctime(t.begin))
print "end: %s" % (time.ctime(t.end))
# if repeat, check if the calculated repeated time of day matches the initial time of day
if repeat:
t_initial = time.localtime(at + timer_start)
t_repeated = time.localtime(timers[0].begin)
print t_initial
print t_repeated
if t_initial[3:6] != t_repeated[3:6]:
raise tests.TestError("repeated timer time of day does not match")
import FakeNotifications
#sys.modules["Tools.Notifications"] = FakeNotifications
#sys.modules["Tools.NumericalTextInput.NumericalTextInput"] = FakeNotifications
# required stuff for timer (we try to keep this minimal)
enigma.init_nav()
enigma.init_record_config()
enigma.init_parental_control()
from events import log
import calendar
import os
# we are operating in CET/CEST
os.environ['TZ'] = 'CET'
time.tzset()
#log(test_timer, test_name = "test_timer_repeating", base_time = calendar.timegm((2007, 3, 1, 12, 0, 0)), repeat=0x7f, sim_length = 86400 * 7)
log(test_timer, test_name = "test_timer_repeating_dst_skip", base_time = calendar.timegm((2007, 03, 20, 0, 0, 0)), timer_start = 3600, repeat=0x7f, sim_length = 86400 * 7)
#log(test_timer, test_name = "test_timer_repeating_dst_start", base_time = calendar.timegm((2007, 03, 20, 0, 0, 0)), timer_start = 10000, repeat=0x7f, sim_length = 86400 * 7)
|
jlcjunk/pynet_pac | refs/heads/master | class7/exer01.py | 1 | #!/usr/bin/env python
'''
Use Arista's eAPI to obtain 'show interfaces' from the switch.
'''
# imports
try:
import ssl
import jsonrpclib
except ImportError:
print "Could not import a required module.\n Exiting"
raise SystemExit
# Variables
DEV_NAME = 'pynet-sw4'
DEV_USERID = 'eapi'
DEV_PASSWORD = '17mendel'
DEV_IP = '184.105.247.75'
DEV_PORT = '443'
COMMAND_TO_RUN = ['show interfaces']
# set url to use when connecting to device
DEV_URL = 'https://{}:{}@{}:{}/command-api'.format(DEV_USERID, DEV_PASSWORD, DEV_IP, DEV_PORT)
def main():
'''
main app
'''
# Allow trusting unntrusted certs
ssl._create_default_https_context = ssl._create_unverified_context
# setup connection to device
dev_connection = jsonrpclib.Server(DEV_URL)
# execute commands
cmd_results = dev_connection.runCmds(1, COMMAND_TO_RUN)
# print header
print '\n\n'
print 'Interface inOctets outOctets'
# print octets for each interface
for dev_interface in cmd_results[0]['interfaces']:
if 'Ethernet' in dev_interface:
counters = cmd_results[0]['interfaces'][dev_interface]['interfaceCounters']
print dev_interface,
print ' ' * (12 - len(dev_interface)),
print counters['inOctets'],
print ' ' * (12 - len(str(counters['inOctets']))),
print counters['outOctets']
# print footer
print '\n\n'
if __name__ == "__main__":
main()
|
RecursiveGreen/spradio-django | refs/heads/master | savepointradio/core/utils.py | 1 | import json
import random
import string
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
def generate_password(length=32):
possible_characters = string.ascii_letters + string.digits + string.punctuation
rng = random.SystemRandom()
return ''.join([rng.choice(possible_characters) for i in range(length)])
def get_len(rawqueryset):
def __len__(self):
params = ['{}'.format(p) for p in self.params]
sql = 'SELECT COUNT(*) FROM (' + rawqueryset.raw_query.format(tuple(params)) + ') B;'
cursor = connection.cursor()
cursor.execute(sql)
row = cursor.fetchone()
return row[0]
return __len__
def get_secret(setting, path):
'''Get the secret variable or return explicit exception.'''
with open(path) as f:
secrets = json.loads(f.read())
try:
return secrets[setting]
except KeyError:
error_msg = 'Set the {0} secret variable in "{1}"'.format(setting, path)
raise ImproperlyConfigured(error_msg)
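# Hedged usage sketch (not part of the original module). The path and the
# 'SECRET_KEY' entry below are assumptions for illustration:
#
#     token = generate_password(16)   # 16 chars of letters/digits/punctuation
#     key = get_secret('SECRET_KEY', '/etc/app/secrets.json')
#
# get_secret() raises ImproperlyConfigured for a missing key, so a bad
# deployment fails loudly at start-up instead of with a bare KeyError.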
|
asampat3090/keras | refs/heads/master | keras/utils/test_utils.py | 85 | import numpy as np
def get_test_data(nb_train=1000, nb_test=500, input_shape=(10,), output_shape=(2,),
classification=True, nb_class=2):
'''
classification=True overrides output_shape
(i.e. output_shape is set to (1,)) and the output
consists of integers in [0, nb_class-1].
Otherwise: float output with shape output_shape.
'''
nb_sample = nb_train + nb_test
if classification:
y = np.random.randint(0, nb_class, size=(nb_sample, 1))
X = np.zeros((nb_sample,) + input_shape)
for i in range(nb_sample):
X[i] = np.random.normal(loc=y[i], scale=1.0, size=input_shape)
else:
y_loc = np.random.random((nb_sample,))
X = np.zeros((nb_sample,) + input_shape)
y = np.zeros((nb_sample,) + output_shape)
for i in range(nb_sample):
X[i] = np.random.normal(loc=y_loc[i], scale=1.0, size=input_shape)
y[i] = np.random.normal(loc=y_loc[i], scale=1.0, size=output_shape)
return (X[:nb_train], y[:nb_train]), (X[nb_train:], y[nb_train:])
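# Hedged usage sketch (not part of the original module): build a toy
# 3-class problem and split it into train/test arrays.
#
#     (X_train, y_train), (X_test, y_test) = get_test_data(
#         nb_train=100, nb_test=50, input_shape=(10,),
#         classification=True, nb_class=3)
#     # X_train.shape == (100, 10), y_train.shape == (100, 1)
#     # y_train holds integer class labels in [0, 2]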
|
c0hen/django-venv | refs/heads/master | lib/python3.4/site-packages/django/template/loaders/locmem.py | 464 | """
Wrapper for loading templates from a plain Python dict.
"""
import warnings
from django.template import Origin, TemplateDoesNotExist
from django.utils.deprecation import RemovedInDjango20Warning
from .base import Loader as BaseLoader
class Loader(BaseLoader):
def __init__(self, engine, templates_dict):
self.templates_dict = templates_dict
super(Loader, self).__init__(engine)
def get_contents(self, origin):
try:
return self.templates_dict[origin.name]
except KeyError:
raise TemplateDoesNotExist(origin)
def get_template_sources(self, template_name):
yield Origin(
name=template_name,
template_name=template_name,
loader=self,
)
def load_template_source(self, template_name, template_dirs=None):
warnings.warn(
'The load_template_sources() method is deprecated. Use '
'get_template() or get_contents() instead.',
RemovedInDjango20Warning,
)
try:
return self.templates_dict[template_name], template_name
except KeyError:
raise TemplateDoesNotExist(template_name)
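# Hedged usage sketch (not part of the original module): a locmem loader is
# normally configured through an Engine rather than instantiated directly.
# The template name and contents below are assumptions for illustration:
#
#     from django.template import Context, Engine
#     engine = Engine(loaders=[
#         ('django.template.loaders.locmem.Loader', {
#             'index.html': 'Hello {{ name }}!',
#         }),
#     ])
#     engine.get_template('index.html').render(Context({'name': 'world'}))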
|
jalavik/invenio-records | refs/heads/master | invenio_records/recordext/functions/get_record_collections.py | 4 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Record field function."""
from invenio_records.signals import (
before_record_insert,
before_record_update,
)
from six import iteritems
from invenio_utils.datastructures import LazyDict
from invenio_search.api import Query
COLLECTIONS_DELETED_RECORDS = '{dbquery} AND NOT collection:"DELETED"'
def _queries():
"""Preprocess collection queries."""
from invenio_ext.sqlalchemy import db
from invenio_collections.models import Collection
return dict(
(collection.name, dict(
query=Query(COLLECTIONS_DELETED_RECORDS.format(
dbquery=collection.dbquery)
),
ancestors=set(c.name for c in collection.ancestors
if c.dbquery is None)
))
for collection in Collection.query.filter(
Collection.dbquery.isnot(None),
db.not_(Collection.dbquery.like('hostedcollection:%'))
).all()
)
queries = LazyDict(_queries)
def get_record_collections(record):
"""Return list of collections to which record belongs to.
:record: Record instance
:returns: list of collection names
"""
output = set()
for name, data in iteritems(queries):
if data['query'].match(record):
output.add(name)
output |= data['ancestors']
return list(output)
@before_record_insert.connect
@before_record_update.connect
def update_collections(sender, *args, **kwargs):
sender['_collections'] = get_record_collections(sender)
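# Hedged usage note (not part of the original module): the two signal hooks
# above keep record['_collections'] current on insert/update; the value can
# also be recomputed by hand for an existing Record instance:
#
#     record['_collections'] = get_record_collections(record)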
|
brian-rose/climlab | refs/heads/main | climlab/dynamics/budyko_transport.py | 1 | from __future__ import division
from climlab.process.energy_budget import EnergyBudget
from climlab.domain.field import global_mean
class BudykoTransport(EnergyBudget):
r"""calculates the 1 dimensional heat transport as the difference
between the local temperature and the global mean temperature.
:param float b: budyko transport parameter \n
- unit: :math:`\\textrm{W} / \\left( \\textrm{m}^2 \\ ^{\\circ} \\textrm{C} \\right)` \n
- default value: ``3.81``
As BudykoTransport is a :class:`~climlab.process.process.Process` it needs
a state to be defined on. See the example below for details.
**Computation Details:** \n
In a global Energy Balance Model
.. math::
C \\frac{dT}{dt} = R\\downarrow - R\\uparrow - H
with model state :math:`T`, the energy transport term :math:`H`
can be described as
.. math::
H = b [T - \\bar{T}]
where :math:`T` is a vector of the model temperature and :math:`\\bar{T}`
describes the mean value of :math:`T`.
For further information see :cite:`Budyko_1969`.
:Example:
Budyko Transport as a standalone process:
.. plot:: code_input_manual/example_budyko_transport.py
:include-source:
"""
# implemented by m-kreuzer
def __init__(self, b=3.81, **kwargs):
super(BudykoTransport, self).__init__(**kwargs)
self.b = b
@property
def b(self):
r"""the budyko transport parameter in unit
:math:`\\frac{\\textrm{W}}{\\textrm{m}^2 \\textrm{K}}`
:getter: returns the budyko transport parameter
:setter: sets the budyko transport parameter
:type: float
"""
return self._b
@b.setter
def b(self, value):
self._b = value
self.param['b'] = value
def _compute_heating_rates(self):
"""Computes energy flux convergences to get heating rates in :math:`W/m^2`.
"""
for varname, value in self.state.items():
self.heating_rate[varname] = - self.b * (value - global_mean(value))
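# Hedged usage sketch (not part of the original module); the climlab calls
# below follow the public API but should be treated as assumptions:
#
#     import climlab
#     state = climlab.surface_state(num_lat=90)
#     budyko = BudykoTransport(state=state, b=3.81)
#     budyko.step_forward()   # each step relaxes Ts toward its global mean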
|
gorlemik/selenium | refs/heads/master | py/selenium/webdriver/common/html5/__init__.py | 2454 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
|
dannyperry571/theapprentice | refs/heads/master | script.module.youtube.dl/lib/youtube_dl/extractor/tv3.py | 69 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
class TV3IE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?tv3\.co\.nz/(?P<id>[^/]+)/tabid/\d+/articleID/\d+/MCat/\d+/Default\.aspx'
_TEST = {
'url': 'http://www.tv3.co.nz/MOTORSPORT-SRS-SsangYong-Hampton-Downs-Round-3/tabid/3692/articleID/121615/MCat/2915/Default.aspx',
'info_dict': {
'id': '4659127992001',
'ext': 'mp4',
'title': 'CRC Motorsport: SRS SsangYong Hampton Downs Round 3 - S2015 Ep3',
'description': 'SsangYong Racing Series returns for Round 3 with drivers from New Zealand and Australia taking to the grid at Hampton Downs raceway.',
'uploader_id': '3812193411001',
'upload_date': '20151213',
'timestamp': 1449975272,
},
'expected_warnings': [
'Failed to download MPD manifest'
],
'params': {
# m3u8 download
'skip_download': True,
},
}
BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/3812193411001/default_default/index.html?videoId=%s'
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
brightcove_id = self._search_regex(r'<param\s*name="@videoPlayer"\s*value="(\d+)"', webpage, 'brightcove id')
return self.url_result(self.BRIGHTCOVE_URL_TEMPLATE % brightcove_id, 'BrightcoveNew', brightcove_id)
|