repo_name | path | copies | size | content | license
---|---|---|---|---|---|
ironbox360/django | django/contrib/gis/db/backends/postgis/models.py | 396 | 2158 | """
The GeometryColumns and SpatialRefSys models for the PostGIS backend.
"""
from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class PostGISGeometryColumns(models.Model):
"""
The 'geometry_columns' table from PostGIS. See the PostGIS
documentation at Ch. 4.3.2.
On PostGIS 2, this is a view.
"""
f_table_catalog = models.CharField(max_length=256)
f_table_schema = models.CharField(max_length=256)
f_table_name = models.CharField(max_length=256)
f_geometry_column = models.CharField(max_length=256)
coord_dimension = models.IntegerField()
srid = models.IntegerField(primary_key=True)
type = models.CharField(max_length=30)
class Meta:
app_label = 'gis'
db_table = 'geometry_columns'
managed = False
@classmethod
def table_name_col(cls):
"""
Returns the name of the metadata column used to store the feature table
name.
"""
return 'f_table_name'
@classmethod
def geom_col_name(cls):
"""
Returns the name of the metadata column used to store the feature
geometry column.
"""
return 'f_geometry_column'
def __str__(self):
return "%s.%s - %dD %s field (SRID: %d)" % \
(self.f_table_name, self.f_geometry_column,
self.coord_dimension, self.type, self.srid)
class PostGISSpatialRefSys(models.Model, SpatialRefSysMixin):
"""
The 'spatial_ref_sys' table from PostGIS. See the PostGIS
documentation at Ch. 4.2.1.
"""
srid = models.IntegerField(primary_key=True)
auth_name = models.CharField(max_length=256)
auth_srid = models.IntegerField()
srtext = models.CharField(max_length=2048)
proj4text = models.CharField(max_length=2048)
class Meta:
app_label = 'gis'
db_table = 'spatial_ref_sys'
managed = False
@property
def wkt(self):
return self.srtext
@classmethod
def wkt_col(cls):
return 'srtext'
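# Illustrative usage (not part of the original Django source), assuming a
# configured PostGIS connection for the 'gis' app:
#
#   srs = PostGISSpatialRefSys.objects.get(srid=4326)
#   srs.wkt   # raw contents of the 'srtext' column
#   srs.srs   # the spatial reference object SpatialRefSysMixin builds from it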
| bsd-3-clause |
ankur-gupta91/horizon-net-ip | openstack_dashboard/dashboards/project/access_and_security/keypairs/tests.py | 7 | 10966 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox3.mox import IsA # noqa
import six
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.access_and_security.\
keypairs.forms import CreateKeypair
from openstack_dashboard.dashboards.project.access_and_security.\
keypairs.forms import KEYPAIR_ERROR_MESSAGES
from openstack_dashboard.test import helpers as test
INDEX_VIEW_URL = reverse('horizon:project:access_and_security:index')
class KeyPairViewTests(test.TestCase):
def test_delete_keypair(self):
keypair = self.keypairs.first()
self.mox.StubOutWithMock(api.network, 'floating_ip_supported')
self.mox.StubOutWithMock(api.nova, 'keypair_list')
self.mox.StubOutWithMock(api.nova, 'keypair_delete')
# floating_ip_supported is called in Floating IP tab allowed().
api.network.floating_ip_supported(IsA(http.HttpRequest)) \
.AndReturn(True)
api.nova.keypair_list(IsA(http.HttpRequest)) \
.AndReturn(self.keypairs.list())
api.nova.keypair_delete(IsA(http.HttpRequest), keypair.name)
self.mox.ReplayAll()
formData = {'action': 'keypairs__delete__%s' % keypair.name}
res = self.client.post(INDEX_VIEW_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_VIEW_URL)
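# Explanatory note (not in the original file): these tests use the mox
# record/replay pattern. Calls made on the stubbed APIs before ReplayAll()
# record expectations; the view under test must then make exactly those
# calls, and the test harness verifies them on teardown.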
def test_delete_keypair_exception(self):
keypair = self.keypairs.first()
self.mox.StubOutWithMock(api.network, 'floating_ip_supported')
self.mox.StubOutWithMock(api.nova, 'keypair_list')
self.mox.StubOutWithMock(api.nova, 'keypair_delete')
# floating_ip_supported is called in Floating IP tab allowed().
api.network.floating_ip_supported(IsA(http.HttpRequest)) \
.AndReturn(True)
api.nova.keypair_list(IsA(http.HttpRequest)) \
.AndReturn(self.keypairs.list())
api.nova.keypair_delete(IsA(http.HttpRequest), keypair.name) \
.AndRaise(self.exceptions.nova)
self.mox.ReplayAll()
formData = {'action': 'keypairs__delete__%s' % keypair.name}
res = self.client.post(INDEX_VIEW_URL, formData)
self.assertRedirectsNoFollow(res, INDEX_VIEW_URL)
def test_create_keypair_get(self):
res = self.client.get(
reverse('horizon:project:access_and_security:keypairs:create'))
self.assertTemplateUsed(
res, 'project/access_and_security/keypairs/create.html')
def test_download_keypair_get(self):
keypair_name = "keypair"
context = {'keypair_name': keypair_name}
url = reverse('horizon:project:access_and_security:keypairs:download',
kwargs={'keypair_name': keypair_name})
res = self.client.get(url, context)
self.assertTemplateUsed(
res, 'project/access_and_security/keypairs/download.html')
def test_generate_keypair_get(self):
keypair = self.keypairs.first()
keypair.private_key = "secret"
self.mox.StubOutWithMock(api.nova, 'keypair_create')
api.nova.keypair_create(IsA(http.HttpRequest),
keypair.name).AndReturn(keypair)
self.mox.ReplayAll()
context = {'keypair_name': keypair.name}
url = reverse('horizon:project:access_and_security:keypairs:generate',
kwargs={'keypair_name': keypair.name})
res = self.client.get(url, context)
self.assertTrue(res.has_header('content-disposition'))
def test_keypair_detail_get(self):
keypair = self.keypairs.first()
keypair.private_key = "secrete"
self.mox.StubOutWithMock(api.nova, 'keypair_get')
api.nova.keypair_get(IsA(http.HttpRequest),
keypair.name).AndReturn(keypair)
self.mox.ReplayAll()
context = {'keypair_name': keypair.name}
url = reverse('horizon:project:access_and_security:keypairs:detail',
kwargs={'keypair_name': keypair.name})
res = self.client.get(url, context)
# Note(Itxaka): With breadcrumbs, the title is in a list as active
self.assertContains(res, '<li class="active">Key Pair Details</li>',
1, 200)
self.assertContains(res, "<dd>%s</dd>" % keypair.name, 1, 200)
@test.create_stubs({api.nova: ("keypair_create", "keypair_delete")})
def test_regenerate_keypair_get(self):
keypair = self.keypairs.first()
keypair.private_key = "secret"
optional_param = "regenerate"
api.nova.keypair_delete(IsA(http.HttpRequest), keypair.name)
api.nova.keypair_create(IsA(http.HttpRequest),
keypair.name).AndReturn(keypair)
self.mox.ReplayAll()
url = reverse('horizon:project:access_and_security:keypairs:generate',
kwargs={'keypair_name': keypair.name,
'optional': optional_param})
res = self.client.get(url)
self.assertTrue(res.has_header('content-disposition'))
@test.create_stubs({api.nova: ("keypair_import",)})
def test_import_keypair(self):
key1_name = "new_key_pair"
public_key = "ssh-rsa ABCDEFGHIJKLMNOPQR\r\n" \
"STUVWXYZ1234567890\r" \
"XXYYZZ user@computer\n\n"
api.nova.keypair_import(IsA(http.HttpRequest), key1_name,
public_key.replace("\r", "").replace("\n", ""))
self.mox.ReplayAll()
formData = {'method': 'ImportKeypair',
'name': key1_name,
'public_key': public_key}
url = reverse('horizon:project:access_and_security:keypairs:import')
res = self.client.post(url, formData)
self.assertMessageCount(res, success=1)
@test.create_stubs({api.nova: ("keypair_import",)})
def test_import_keypair_invalid_key(self):
key_name = "new_key_pair"
public_key = "ABCDEF"
api.nova.keypair_import(IsA(http.HttpRequest), key_name, public_key) \
.AndRaise(self.exceptions.nova)
self.mox.ReplayAll()
formData = {'method': 'ImportKeypair',
'name': key_name,
'public_key': public_key}
url = reverse('horizon:project:access_and_security:keypairs:import')
res = self.client.post(url, formData, follow=True)
self.assertEqual(res.redirect_chain, [])
msg = 'Unable to import key pair.'
self.assertFormErrors(res, count=1, message=msg)
def test_import_keypair_invalid_key_name(self):
key_name = "invalid#key?name=!"
public_key = "ABCDEF"
formData = {'method': 'ImportKeypair',
'name': key_name,
'public_key': public_key}
url = reverse('horizon:project:access_and_security:keypairs:import')
res = self.client.post(url, formData, follow=True)
self.assertEqual(res.redirect_chain, [])
msg = six.text_type(KEYPAIR_ERROR_MESSAGES['invalid'])
self.assertFormErrors(res, count=1, message=msg)
@test.create_stubs({api.nova: ("keypair_create",)})
def test_generate_keypair_exception(self):
keypair = self.keypairs.first()
api.nova.keypair_create(IsA(http.HttpRequest), keypair.name) \
.AndRaise(self.exceptions.nova)
self.mox.ReplayAll()
context = {'keypair_name': keypair.name}
url = reverse('horizon:project:access_and_security:keypairs:generate',
kwargs={'keypair_name': keypair.name})
res = self.client.get(url, context)
self.assertRedirectsNoFollow(
res, reverse('horizon:project:access_and_security:index'))
@test.create_stubs({api.nova: ("keypair_import",)})
def test_import_keypair_with_regex_defined_name(self):
key1_name = "new-key-pair with_regex"
public_key = "ssh-rsa ABCDEFGHIJKLMNOPQR\r\n" \
"STUVWXYZ1234567890\r" \
"XXYYZZ user@computer\n\n"
api.nova.keypair_import(IsA(http.HttpRequest), key1_name,
public_key.replace("\r", "").replace("\n", ""))
self.mox.ReplayAll()
formData = {'method': 'ImportKeypair',
'name': key1_name,
'public_key': public_key}
url = reverse('horizon:project:access_and_security:keypairs:import')
res = self.client.post(url, formData)
self.assertMessageCount(res, success=1)
@test.create_stubs({api.nova: ("keypair_create",)})
def test_create_keypair_with_regex_name_get(self):
keypair = self.keypairs.first()
keypair.name = "key-space pair-regex_name-0123456789"
keypair.private_key = "secret"
api.nova.keypair_create(IsA(http.HttpRequest),
keypair.name).AndReturn(keypair)
self.mox.ReplayAll()
context = {'keypair_name': keypair.name}
url = reverse('horizon:project:access_and_security:keypairs:generate',
kwargs={'keypair_name': keypair.name})
res = self.client.get(url, context)
self.assertTrue(res.has_header('content-disposition'))
def test_download_with_regex_name_get(self):
keypair_name = "key pair-regex_name-0123456789"
context = {'keypair_name': keypair_name}
url = reverse('horizon:project:access_and_security:keypairs:download',
kwargs={'keypair_name': keypair_name})
res = self.client.get(url, context)
self.assertTemplateUsed(
res, 'project/access_and_security/keypairs/download.html')
@test.create_stubs({api.nova: ('keypair_list',)})
def test_create_duplicate_keypair(self):
keypair_name = self.keypairs.first().name
api.nova.keypair_list(IsA(http.HttpRequest)) \
.AndReturn(self.keypairs.list())
self.mox.ReplayAll()
form = CreateKeypair(self.request, data={'name': keypair_name})
self.assertFalse(form.is_valid())
self.assertIn('The name is already in use.',
form.errors['name'][0])
| apache-2.0 |
ProfessionalIT/maxigenios-website | sdk/google_appengine/lib/PyAMF-0.6.1/pyamf/util/imports.py | 45 | 3707 | # Copyright (c) The PyAMF Project.
# See LICENSE.txt for details.
"""
Tools for doing dynamic imports.
@since: 0.3
"""
import sys
__all__ = ['when_imported']
def when_imported(name, *hooks):
"""
Call C{hook(module)} when module named C{name} is first imported. C{name}
must be a fully qualified (i.e. absolute) module name.
C{hook} must accept one argument: the imported module object.
If the module has already been imported, C{hook(module)} is called
immediately. If the module has not been imported, then the hook is called
when the module is first imported.
"""
global finder
finder.when_imported(name, *hooks)
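# Illustrative usage (not part of the original PyAMF source):
#
#   def configure_json(mod):
#       print 'json imported:', mod.__name__
#
#   when_imported('json', configure_json)
#
# If 'json' is already in sys.modules the hook fires immediately; otherwise
# it runs the first time the module is imported.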
class ModuleFinder(object):
"""
This is a special module finder object that executes a collection of
callables when a specific module has been imported. An instance of this
is placed in C{sys.meta_path}, which is consulted before C{sys.modules} -
allowing us to provide this functionality.
@ivar post_load_hooks: C{dict} of C{full module path -> callable} to be
executed when the module is imported.
@ivar loaded_modules: C{list} of modules that this finder has seen. Used
to stop recursive imports in L{load_module}
@see: L{when_imported}
@since: 0.5
"""
def __init__(self):
self.post_load_hooks = {}
self.loaded_modules = []
def find_module(self, name, path=None):
"""
Called when an import is made. If there are hooks waiting for this
module to be imported then we stop the normal import process and
manually load the module.
@param name: The name of the module being imported.
@param path: The root path of the module (if a package). We ignore this.
@return: If we want to hook this module, we return a C{loader}
interface (which is this instance again). If not we return C{None}
to allow the standard import process to continue.
"""
if name in self.loaded_modules:
return None
hooks = self.post_load_hooks.get(name, None)
if hooks:
return self
def load_module(self, name):
"""
If we get this far, then there are hooks waiting to be called on
import of this module. We manually load the module and then run the
hooks.
@param name: The name of the module to import.
"""
self.loaded_modules.append(name)
try:
__import__(name, {}, {}, [])
mod = sys.modules[name]
self._run_hooks(name, mod)
except:
self.loaded_modules.pop()
raise
return mod
def when_imported(self, name, *hooks):
"""
@see: L{when_imported}
"""
if name in sys.modules:
for hook in hooks:
hook(sys.modules[name])
return
h = self.post_load_hooks.setdefault(name, [])
h.extend(hooks)
def _run_hooks(self, name, module):
"""
Run all hooks for a module.
"""
hooks = self.post_load_hooks.pop(name, [])
for hook in hooks:
hook(module)
def __getstate__(self):
return (self.post_load_hooks.copy(), self.loaded_modules[:])
def __setstate__(self, state):
self.post_load_hooks, self.loaded_modules = state
def _init():
"""
Internal function to install the module finder.
"""
global finder
if finder is None:
finder = ModuleFinder()
if finder not in sys.meta_path:
sys.meta_path.insert(0, finder)
finder = None
_init()
| mit |
Alwnikrotikz/chimerascan | chimerascan/deprecated/sam_v1.py | 6 | 8725 | '''
Created on Jun 2, 2011
@author: mkiyer
chimerascan: chimeric transcript discovery using RNA-seq
Copyright (C) 2011 Matthew Iyer
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import operator
from chimerascan import pysam
from seq import DNA_reverse_complement
#
# constants used for CIGAR alignments
#
CIGAR_M = 0 #match Alignment match (can be a sequence match or mismatch)
CIGAR_I = 1 #insertion Insertion to the reference
CIGAR_D = 2 #deletion Deletion from the reference
CIGAR_N = 3 #skip Skipped region from the reference
CIGAR_S = 4 #softclip Soft clip on the read (clipped sequence present in <seq>)
CIGAR_H = 5 #hardclip Hard clip on the read (clipped sequence NOT present in <seq>)
CIGAR_P = 6 #padding Padding (silent deletion from the padded reference sequence)
def parse_reads_by_qname(samfh):
"""
generator function to parse and return lists of
reads that share the same qname
"""
reads = []
for read in samfh:
if len(reads) > 0 and read.qname != reads[-1].qname:
yield reads
reads = []
reads.append(read)
if len(reads) > 0:
yield reads
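# Illustrative usage (not part of the original module): the input SAM/BAM
# file must be sorted by qname so alignments of the same fragment are
# adjacent. File name and callback below are hypothetical:
#
#   samfh = pysam.Samfile('aligned.qname_sorted.bam', 'rb')
#   for reads in parse_reads_by_qname(samfh):
#       process(reads)  # 'reads' holds every alignment for one query name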
def parse_pe_reads(bamfh):
pe_reads = ([], [])
# reads must be sorted by qname
num_reads = 0
prev_qname = None
for read in bamfh:
# get read attributes
qname = read.qname
readnum = 1 if read.is_read2 else 0
# if query name changes we have completely finished
# the fragment and can reset the read data
if num_reads > 0 and qname != prev_qname:
yield pe_reads
# reset state variables
pe_reads = ([], [])
num_reads = 0
pe_reads[readnum].append(read)
prev_qname = qname
num_reads += 1
if num_reads > 0:
yield pe_reads
def parse_unpaired_pe_reads(bamfh):
"""
parses alignments that were aligned in single read mode
and hence all hits are labeled as 'read1' and lack mate
information. instead the read1 read2 information is
attached to the 'qname' field
"""
pe_reads = ([], [])
num_reads = 0
prev_qname = None
for read in bamfh:
# extract read1/2 from qname
readnum = int(read.qname[-1])
if readnum == 1:
read.is_read1 = True
mate = 0
elif readnum == 2:
mate = 1
read.is_read2 = True
# reconstitute correct qname
qname = read.qname[:-2]
read.qname = qname
# if query name changes we have completely finished
# the fragment and can reset the read data
if num_reads > 0 and qname != prev_qname:
yield pe_reads
# reset state variables
pe_reads = ([], [])
num_reads = 0
pe_reads[mate].append(read)
prev_qname = qname
num_reads += 1
if num_reads > 0:
yield pe_reads
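# Worked example of the qname convention assumed above (illustrative): a
# fragment aligned in single-read mode carries its mate number as the last
# character of the query name, e.g. 'frag001/1' -> readnum 1 (mate 0) and
# 'frag001/2' -> readnum 2 (mate 1); qname[:-2] strips the separator and
# digit, reconstituting 'frag001'.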
def group_read_pairs(pe_reads):
"""
Given tuple of ([read1 reads],[read2 reads]) paired-end read alignments
return mate-pairs and unpaired reads
"""
# group paired reads
paired_reads = ([],[])
unpaired_reads = ([],[])
for rnum,reads in enumerate(pe_reads):
for r in reads:
if r.is_proper_pair:
paired_reads[rnum].append(r)
else:
unpaired_reads[rnum].append(r)
# check if we have at least one pair
pairs = []
if all((len(reads) > 0) for reads in paired_reads):
# index read1 by mate reference name and position
rdict = {}
for r in paired_reads[0]:
rdict[(r.mrnm,r.mpos)] = r
# iterate through read2 and get mate pairs
for r2 in paired_reads[1]:
r1 = rdict[(r2.rname,r2.pos)]
pairs.append((r1,r2))
return pairs, unpaired_reads
def select_best_scoring_pairs(pairs):
"""
return the set of read pairs (provided as a list of tuples) with
the highest summed alignment score
"""
if len(pairs) == 0:
return []
# gather alignment scores for each pair
pair_scores = [(pair[0].opt('AS') + pair[1].opt('AS'), pair) for pair in pairs]
pair_scores.sort(key=operator.itemgetter(0), reverse=True)
best_score = pair_scores[0][0]
best_pairs = [pair_scores[0][1]]
for score,pair in pair_scores[1:]:
if score < best_score:
break
best_pairs.append(pair)
return best_pairs
def select_primary_alignments(reads):
"""
return only reads that lack the secondary alignment bit
"""
if len(reads) == 0:
return []
# separate unmapped reads from primary alignments
unmapped_reads = []
primary_reads = []
for r in reads:
if r.is_unmapped:
unmapped_reads.append(r)
elif not r.is_secondary:
primary_reads.append(r)
if len(primary_reads) == 0:
assert len(unmapped_reads) > 0
return unmapped_reads
return primary_reads
def select_best_mismatch_strata(reads, mismatch_tolerance=0):
if len(reads) == 0:
return []
# sort reads by number of mismatches
mapped_reads = []
unmapped_reads = []
for r in reads:
if r.is_unmapped:
unmapped_reads.append(r)
else:
mapped_reads.append((r.opt('NM'), r))
if len(mapped_reads) == 0:
return unmapped_reads
sorted_reads = sorted(mapped_reads, key=operator.itemgetter(0))
best_nm = sorted_reads[0][0]
worst_nm = sorted_reads[-1][0]
sorted_reads.extend((worst_nm+1, r) for r in unmapped_reads)
# choose reads within a certain mismatch tolerance
best_reads = []
for mismatches, r in sorted_reads:
if mismatches > (best_nm + mismatch_tolerance):
break
best_reads.append(r)
return best_reads
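# Illustrative behaviour (not part of the original module): with mapped NM
# tags [0, 0, 1, 3] and mismatch_tolerance=1, best_nm is 0 and the cutoff
# is NM <= 1, so the first three reads are returned. Unmapped reads are
# ranked as worst_nm + 1 (here 4) and fall outside the cutoff.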
def copy_read(r):
a = pysam.AlignedRead()
a.qname = r.qname
a.seq = r.seq
a.flag = r.flag
a.rname = r.rname
a.pos = r.pos
a.mapq = r.mapq
a.cigar = r.cigar
a.mrnm = r.mrnm
a.mpos = r.mpos
a.isize = r.isize
a.qual = r.qual
a.tags = r.tags
return a
def soft_pad_read(fq, r):
"""
'fq' is the fastq record
'r' in the AlignedRead SAM read
"""
# make sequence soft clipped
ext_length = len(fq.seq) - len(r.seq)
cigar_softclip = [(CIGAR_S, ext_length)]
cigar = r.cigar
# reconstitute full length sequence in read
if r.is_reverse:
seq = DNA_reverse_complement(fq.seq)
qual = fq.qual[::-1]
if (cigar is not None) and (ext_length > 0):
cigar = cigar_softclip + cigar
else:
seq = fq.seq
qual = fq.qual
if (cigar is not None) and (ext_length > 0):
cigar = cigar + cigar_softclip
# replace read field
r.seq = seq
r.qual = qual
r.cigar = cigar
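# Worked example (illustrative): if the fastq record is 50bp but the
# aligner saw a 40bp trimmed read mapped on the forward strand with CIGAR
# [(CIGAR_M, 40)], then ext_length = 10 and the read is restored to the
# full 50bp sequence with CIGAR [(CIGAR_M, 40), (CIGAR_S, 10)]. On the
# reverse strand the soft clip is prepended instead:
# [(CIGAR_S, 10), (CIGAR_M, 40)].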
def pair_reads(r1, r2, tags=None):
'''
fill in paired-end fields in SAM record
'''
if tags is None:
tags = []
# convert read1 to paired-end
r1.is_paired = True
r1.is_proper_pair = True
r1.is_read1 = True
r1.mate_is_reverse = r2.is_reverse
r1.mate_is_unmapped = r2.is_unmapped
r1.mpos = r2.pos
r1.mrnm = r2.rname
r1.tags = r1.tags + tags
# convert read2 to paired-end
r2.is_paired = True
r2.is_proper_pair = True
r2.is_read2 = True
r2.mate_is_reverse = r1.is_reverse
r2.mate_is_unmapped = r1.is_unmapped
r2.mpos = r1.pos
r2.mrnm = r1.rname
r2.tags = r2.tags + tags
# compute insert size
if r1.rname != r2.rname:
r1.isize = 0
r2.isize = 0
elif r1.pos > r2.pos:
isize = r1.aend - r2.pos
r1.isize = -isize
r2.isize = isize
else:
isize = r2.aend - r1.pos
r1.isize = isize
r2.isize = -isize
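# Worked example (illustrative): for mates on the same reference with
# r1.pos = 100, r1.aend = 150, r2.pos = 300 and r2.aend = 350, the else
# branch applies, so isize = r2.aend - r1.pos = 250, giving r1.isize = 250
# and r2.isize = -250. Mates on different references get isize 0.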
def get_clipped_interval(r):
cigar = r.cigar
padstart, padend = r.pos, r.aend
if len(cigar) > 1:
if (cigar[0][0] == CIGAR_S or
cigar[0][0] == CIGAR_H):
padstart -= cigar[0][1]
elif (cigar[-1][0] == CIGAR_S or
cigar[-1][0] == CIGAR_H):
padend += cigar[-1][1]
return padstart, padend
| gpl-3.0 |
mrry/tensorflow | tensorflow/models/embedding/word2vec_optimized_test.py | 31 | 2294 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for word2vec_optimized module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
from tensorflow.models.embedding import word2vec_optimized
flags = tf.app.flags
FLAGS = flags.FLAGS
class Word2VecTest(tf.test.TestCase):
def setUp(self):
FLAGS.train_data = os.path.join(self.get_temp_dir(), "test-text.txt")
FLAGS.eval_data = os.path.join(self.get_temp_dir(), "eval-text.txt")
FLAGS.save_path = self.get_temp_dir()
with open(FLAGS.train_data, "w") as f:
f.write(
"""alice was beginning to get very tired of sitting by her sister on
the bank, and of having nothing to do: once or twice she had peeped
into the book her sister was reading, but it had no pictures or
conversations in it, 'and what is the use of a book,' thought alice
'without pictures or conversations?' So she was considering in her own
mind (as well as she could, for the hot day made her feel very sleepy
and stupid), whether the pleasure of making a daisy-chain would be
worth the trouble of getting up and picking the daisies, when suddenly
a White rabbit with pink eyes ran close by her.\n""")
with open(FLAGS.eval_data, "w") as f:
f.write("alice she rabbit once\n")
def testWord2VecOptimized(self):
FLAGS.batch_size = 5
FLAGS.num_neg_samples = 10
FLAGS.epochs_to_train = 1
FLAGS.min_count = 0
word2vec_optimized.main([])
if __name__ == "__main__":
tf.test.main()
| apache-2.0 |
jamilatta/scielo-manager | scielomanager/scielomanager/utils/modelmanagers/base.py | 2 | 1789 | # coding: utf-8
"""
The UserObjectManager interface
===============================
Each model object that aims to be contextualized by the current
app user and the visibility rules defined, must provide a
manager called ``userobjects`` following the context protocol:
Custom instance of ``models.Manager``
-------------------------------------
* ``get_query_set`` returns a custom subclass of models.query.QuerySet;
* ``all`` returns all objects the user can access;
* ``active`` returns a subset of ``all``, only with objects from
the active collection.
Custom instance of ``models.query.QuerySet``
--------------------------------------------
* ``all`` returns all objects the user can access;
* ``active`` returns all objects from the active collection.
* ``startswith`` (optional) returns all objects with the given
initial char in a meaningful field. this is used for sorting
and presentation purposes.
* ``simple_search`` (optional) performs a simple search query on one or more
meaningful fields. Accepts a single string as the search term.
* ``available`` returns all objects not marked as trash.
* ``unavailable`` returns all objects marked as trash.
"""
import caching.base
class UserObjectQuerySet(caching.base.CachingQuerySet):
"""
Provides a basic implementation of userobject querysets with
caching features.
"""
def available(self):
return self
def unavailable(self):
return self.none()
class UserObjectManager(caching.base.CachingManager):
"""
Provides a basic implementation of userobject managers with
caching features.
"""
def all(self, **kwargs):
return self.get_query_set().all(**kwargs)
def active(self, **kwargs):
return self.get_query_set().active(**kwargs)
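# Minimal wiring sketch (illustrative, not part of the original module): a
# model opts in to the protocol by exposing a manager instance named
# ``userobjects``, e.g.:
#
#   class Journal(models.Model):
#       objects = models.Manager()
#       userobjects = UserObjectManager()
#
# Concrete managers are expected to override ``get_query_set`` to return a
# ``UserObjectQuerySet`` subclass implementing the visibility rules
# described in the module docstring.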
| bsd-2-clause |
tyndyll/py-morsecode | docs/source/conf.py | 1 | 8359 | # -*- coding: utf-8 -*-
#
# MorseCode documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 26 16:14:19 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath("../.."))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.ifconfig',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MorseCode'
copyright = u'2013, Tyndyll'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MorseCodedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'MorseCode.tex', u'MorseCode Documentation',
u'Tyndyll', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'morsecode', u'MorseCode Documentation',
[u'Tyndyll'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'MorseCode', u'MorseCode Documentation',
u'Tyndyll', 'MorseCode', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| bsd-2-clause |
praveenkumar/dorrie | dorrie/comps/models.py | 1 | 1663 | # Dorrie - Web interface for building Fedora Spins/Remixes.
# Copyright (C) 2009 Red Hat Inc.
# Author: Shreyank Gupta <sgupta@redhat.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
class Spin(models.Model):
"""Class for the releases"""
name = models.TextField(
help_text="The name of the spin.")
language = models.TextField()
timezone = models.TextField()
rootpwd = models.TextField()
baseks = models.TextField()
gplus = models.ManyToManyField('Group', related_name='gplus_set')
gminus = models.ManyToManyField('Group', related_name='gminus_set')
pplus = models.ManyToManyField('Package', related_name='pplus_set')
pminus = models.ManyToManyField('Package', related_name='pminus_set')
pid = models.IntegerField(default=0)
class Group(models.Model):
"""Package Groups"""
name = models.TextField(help_text="The name of the package group.")
class Package(models.Model):
"""A Package."""
name = models.TextField(help_text="The name of the package.")
| agpl-3.0 |
cad-lab/blog | pelicanconf.py | 1 | 2136 | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'kmol'
SITENAME = 'CADLab (虎尾科大MDE)'
#SITEURL = 'http://cad-lab.github.io/blog/'
# Do not use the directory an article lives in as its category
USE_FOLDER_AS_CATEGORY = False
#PATH = 'content'
#OUTPUT_PATH = 'output'
TIMEZONE = 'Asia/Taipei'
DEFAULT_LANG = 'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('pelican-bootstrap3', 'https://github.com/DandyDev/pelican-bootstrap3/'),
('pelican-plugins', 'https://github.com/getpelican/pelican-plugins'),
('Tipue search', 'https://github.com/Tipue/Tipue-Search'),)
# Social widget
#SOCIAL = (('You can add links in your config file', '#'),('Another social link', '#'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
# Must be an absolute path, or relative to the directory containing this settings file
PLUGIN_PATHS = ['plugin']
PLUGINS = ['liquid_tags.notebook', 'summary', 'tipue_search', 'sitemap', 'render_math']
# for sitemap plugin
SITEMAP = {
'format': 'xml',
'priorities': {
'articles': 0.5,
'indexes': 0.5,
'pages': 0.5
},
'changefreqs': {
'articles': 'monthly',
'indexes': 'daily',
'pages': 'monthly'
}
}
# search is for Tipue search
DIRECT_TEMPLATES = (('index', 'tags', 'categories', 'authors', 'archives', 'search'))
# for pelican-bootstrap3 theme settings
#TAG_CLOUD_MAX_ITEMS = 50
DISPLAY_CATEGORIES_ON_SIDEBAR = True
DISPLAY_RECENT_POSTS_ON_SIDEBAR = True
DISPLAY_TAGS_ON_SIDEBAR = True
DISPLAY_TAGS_INLINE = True
TAGS_URL = "tags.html"
CATEGORIES_URL = "categories.html"
#SHOW_ARTICLE_AUTHOR = True
#MENUITEMS = [('Home', '/'), ('Archives', '/archives.html'), ('Search', '/search.html')]
# Keep the latest versions of some commonly used JavaScript libraries here; they can be loaded via http://cadlab.mde.tw/post/js/
STATIC_PATHS = ['js', 'by']
| agpl-3.0 |
chongtianfeiyu/kbengine | kbe/src/lib/python/Lib/encodings/cp858.py | 270 | 34015 | """ Python Character Mapping Codec for CP858, modified from cp850.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp858',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
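# Illustrative round trip (not part of the original module): CP858 differs
# from CP850 by mapping 0xD5 to the euro sign, so once this codec is
# registered:
#
#   '\u20ac'.encode('cp858')  # -> b'\xd5'
#   b'\xd5'.decode('cp858')   # -> '\u20ac' (EURO SIGN)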
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
0x008b: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x008d: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
0x0098: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x009b: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
0x009c: 0x00a3, # POUND SIGN
0x009d: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
0x009e: 0x00d7, # MULTIPLICATION SIGN
0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
0x00a8: 0x00bf, # INVERTED QUESTION MARK
0x00a9: 0x00ae, # REGISTERED SIGN
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
0x00b6: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
0x00b7: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
0x00b8: 0x00a9, # COPYRIGHT SIGN
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x00a2, # CENT SIGN
0x00be: 0x00a5, # YEN SIGN
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
0x00c7: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x00a4, # CURRENCY SIGN
0x00d0: 0x00f0, # LATIN SMALL LETTER ETH
0x00d1: 0x00d0, # LATIN CAPITAL LETTER ETH
0x00d2: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
0x00d3: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
0x00d4: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
0x00d5: 0x20ac, # EURO SIGN
0x00d6: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
0x00d7: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
0x00d8: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x00a6, # BROKEN BAR
0x00de: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
0x00e2: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00e3: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
0x00e4: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
0x00e5: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
0x00e6: 0x00b5, # MICRO SIGN
0x00e7: 0x00fe, # LATIN SMALL LETTER THORN
0x00e8: 0x00de, # LATIN CAPITAL LETTER THORN
0x00e9: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
0x00ea: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
0x00eb: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
0x00ec: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
0x00ed: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00ee: 0x00af, # MACRON
0x00ef: 0x00b4, # ACUTE ACCENT
0x00f0: 0x00ad, # SOFT HYPHEN
0x00f1: 0x00b1, # PLUS-MINUS SIGN
0x00f2: 0x2017, # DOUBLE LOW LINE
0x00f3: 0x00be, # VULGAR FRACTION THREE QUARTERS
0x00f4: 0x00b6, # PILCROW SIGN
0x00f5: 0x00a7, # SECTION SIGN
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x00b8, # CEDILLA
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x00a8, # DIAERESIS
0x00fa: 0x00b7, # MIDDLE DOT
0x00fb: 0x00b9, # SUPERSCRIPT ONE
0x00fc: 0x00b3, # SUPERSCRIPT THREE
0x00fd: 0x00b2, # SUPERSCRIPT TWO
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
'\x00' # 0x0000 -> NULL
'\x01' # 0x0001 -> START OF HEADING
'\x02' # 0x0002 -> START OF TEXT
'\x03' # 0x0003 -> END OF TEXT
'\x04' # 0x0004 -> END OF TRANSMISSION
'\x05' # 0x0005 -> ENQUIRY
'\x06' # 0x0006 -> ACKNOWLEDGE
'\x07' # 0x0007 -> BELL
'\x08' # 0x0008 -> BACKSPACE
'\t' # 0x0009 -> HORIZONTAL TABULATION
'\n' # 0x000a -> LINE FEED
'\x0b' # 0x000b -> VERTICAL TABULATION
'\x0c' # 0x000c -> FORM FEED
'\r' # 0x000d -> CARRIAGE RETURN
'\x0e' # 0x000e -> SHIFT OUT
'\x0f' # 0x000f -> SHIFT IN
'\x10' # 0x0010 -> DATA LINK ESCAPE
'\x11' # 0x0011 -> DEVICE CONTROL ONE
'\x12' # 0x0012 -> DEVICE CONTROL TWO
'\x13' # 0x0013 -> DEVICE CONTROL THREE
'\x14' # 0x0014 -> DEVICE CONTROL FOUR
'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x0016 -> SYNCHRONOUS IDLE
'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
'\x18' # 0x0018 -> CANCEL
'\x19' # 0x0019 -> END OF MEDIUM
'\x1a' # 0x001a -> SUBSTITUTE
'\x1b' # 0x001b -> ESCAPE
'\x1c' # 0x001c -> FILE SEPARATOR
'\x1d' # 0x001d -> GROUP SEPARATOR
'\x1e' # 0x001e -> RECORD SEPARATOR
'\x1f' # 0x001f -> UNIT SEPARATOR
' ' # 0x0020 -> SPACE
'!' # 0x0021 -> EXCLAMATION MARK
'"' # 0x0022 -> QUOTATION MARK
'#' # 0x0023 -> NUMBER SIGN
'$' # 0x0024 -> DOLLAR SIGN
'%' # 0x0025 -> PERCENT SIGN
'&' # 0x0026 -> AMPERSAND
"'" # 0x0027 -> APOSTROPHE
'(' # 0x0028 -> LEFT PARENTHESIS
')' # 0x0029 -> RIGHT PARENTHESIS
'*' # 0x002a -> ASTERISK
'+' # 0x002b -> PLUS SIGN
',' # 0x002c -> COMMA
'-' # 0x002d -> HYPHEN-MINUS
'.' # 0x002e -> FULL STOP
'/' # 0x002f -> SOLIDUS
'0' # 0x0030 -> DIGIT ZERO
'1' # 0x0031 -> DIGIT ONE
'2' # 0x0032 -> DIGIT TWO
'3' # 0x0033 -> DIGIT THREE
'4' # 0x0034 -> DIGIT FOUR
'5' # 0x0035 -> DIGIT FIVE
'6' # 0x0036 -> DIGIT SIX
'7' # 0x0037 -> DIGIT SEVEN
'8' # 0x0038 -> DIGIT EIGHT
'9' # 0x0039 -> DIGIT NINE
':' # 0x003a -> COLON
';' # 0x003b -> SEMICOLON
'<' # 0x003c -> LESS-THAN SIGN
'=' # 0x003d -> EQUALS SIGN
'>' # 0x003e -> GREATER-THAN SIGN
'?' # 0x003f -> QUESTION MARK
'@' # 0x0040 -> COMMERCIAL AT
'A' # 0x0041 -> LATIN CAPITAL LETTER A
'B' # 0x0042 -> LATIN CAPITAL LETTER B
'C' # 0x0043 -> LATIN CAPITAL LETTER C
'D' # 0x0044 -> LATIN CAPITAL LETTER D
'E' # 0x0045 -> LATIN CAPITAL LETTER E
'F' # 0x0046 -> LATIN CAPITAL LETTER F
'G' # 0x0047 -> LATIN CAPITAL LETTER G
'H' # 0x0048 -> LATIN CAPITAL LETTER H
'I' # 0x0049 -> LATIN CAPITAL LETTER I
'J' # 0x004a -> LATIN CAPITAL LETTER J
'K' # 0x004b -> LATIN CAPITAL LETTER K
'L' # 0x004c -> LATIN CAPITAL LETTER L
'M' # 0x004d -> LATIN CAPITAL LETTER M
'N' # 0x004e -> LATIN CAPITAL LETTER N
'O' # 0x004f -> LATIN CAPITAL LETTER O
'P' # 0x0050 -> LATIN CAPITAL LETTER P
'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
'R' # 0x0052 -> LATIN CAPITAL LETTER R
'S' # 0x0053 -> LATIN CAPITAL LETTER S
'T' # 0x0054 -> LATIN CAPITAL LETTER T
'U' # 0x0055 -> LATIN CAPITAL LETTER U
'V' # 0x0056 -> LATIN CAPITAL LETTER V
'W' # 0x0057 -> LATIN CAPITAL LETTER W
'X' # 0x0058 -> LATIN CAPITAL LETTER X
'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
'Z' # 0x005a -> LATIN CAPITAL LETTER Z
'[' # 0x005b -> LEFT SQUARE BRACKET
'\\' # 0x005c -> REVERSE SOLIDUS
']' # 0x005d -> RIGHT SQUARE BRACKET
'^' # 0x005e -> CIRCUMFLEX ACCENT
'_' # 0x005f -> LOW LINE
'`' # 0x0060 -> GRAVE ACCENT
'a' # 0x0061 -> LATIN SMALL LETTER A
'b' # 0x0062 -> LATIN SMALL LETTER B
'c' # 0x0063 -> LATIN SMALL LETTER C
'd' # 0x0064 -> LATIN SMALL LETTER D
'e' # 0x0065 -> LATIN SMALL LETTER E
'f' # 0x0066 -> LATIN SMALL LETTER F
'g' # 0x0067 -> LATIN SMALL LETTER G
'h' # 0x0068 -> LATIN SMALL LETTER H
'i' # 0x0069 -> LATIN SMALL LETTER I
'j' # 0x006a -> LATIN SMALL LETTER J
'k' # 0x006b -> LATIN SMALL LETTER K
'l' # 0x006c -> LATIN SMALL LETTER L
'm' # 0x006d -> LATIN SMALL LETTER M
'n' # 0x006e -> LATIN SMALL LETTER N
'o' # 0x006f -> LATIN SMALL LETTER O
'p' # 0x0070 -> LATIN SMALL LETTER P
'q' # 0x0071 -> LATIN SMALL LETTER Q
'r' # 0x0072 -> LATIN SMALL LETTER R
's' # 0x0073 -> LATIN SMALL LETTER S
't' # 0x0074 -> LATIN SMALL LETTER T
'u' # 0x0075 -> LATIN SMALL LETTER U
'v' # 0x0076 -> LATIN SMALL LETTER V
'w' # 0x0077 -> LATIN SMALL LETTER W
'x' # 0x0078 -> LATIN SMALL LETTER X
'y' # 0x0079 -> LATIN SMALL LETTER Y
'z' # 0x007a -> LATIN SMALL LETTER Z
'{' # 0x007b -> LEFT CURLY BRACKET
'|' # 0x007c -> VERTICAL LINE
'}' # 0x007d -> RIGHT CURLY BRACKET
'~' # 0x007e -> TILDE
'\x7f' # 0x007f -> DELETE
'\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
'\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe0' # 0x0085 -> LATIN SMALL LETTER A WITH GRAVE
'\xe5' # 0x0086 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
'\xea' # 0x0088 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
'\xe8' # 0x008a -> LATIN SMALL LETTER E WITH GRAVE
'\xef' # 0x008b -> LATIN SMALL LETTER I WITH DIAERESIS
'\xee' # 0x008c -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xec' # 0x008d -> LATIN SMALL LETTER I WITH GRAVE
'\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0x008f -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xe6' # 0x0091 -> LATIN SMALL LIGATURE AE
'\xc6' # 0x0092 -> LATIN CAPITAL LIGATURE AE
'\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf2' # 0x0095 -> LATIN SMALL LETTER O WITH GRAVE
'\xfb' # 0x0096 -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xf9' # 0x0097 -> LATIN SMALL LETTER U WITH GRAVE
'\xff' # 0x0098 -> LATIN SMALL LETTER Y WITH DIAERESIS
'\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xf8' # 0x009b -> LATIN SMALL LETTER O WITH STROKE
'\xa3' # 0x009c -> POUND SIGN
'\xd8' # 0x009d -> LATIN CAPITAL LETTER O WITH STROKE
'\xd7' # 0x009e -> MULTIPLICATION SIGN
'\u0192' # 0x009f -> LATIN SMALL LETTER F WITH HOOK
'\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
'\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
'\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
'\xf1' # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE
'\xd1' # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE
'\xaa' # 0x00a6 -> FEMININE ORDINAL INDICATOR
'\xba' # 0x00a7 -> MASCULINE ORDINAL INDICATOR
'\xbf' # 0x00a8 -> INVERTED QUESTION MARK
'\xae' # 0x00a9 -> REGISTERED SIGN
'\xac' # 0x00aa -> NOT SIGN
'\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF
'\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER
'\xa1' # 0x00ad -> INVERTED EXCLAMATION MARK
'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\u2591' # 0x00b0 -> LIGHT SHADE
'\u2592' # 0x00b1 -> MEDIUM SHADE
'\u2593' # 0x00b2 -> DARK SHADE
'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
'\xc1' # 0x00b5 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0x00b6 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc0' # 0x00b7 -> LATIN CAPITAL LETTER A WITH GRAVE
'\xa9' # 0x00b8 -> COPYRIGHT SIGN
'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
'\xa2' # 0x00bd -> CENT SIGN
'\xa5' # 0x00be -> YEN SIGN
'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
'\xe3' # 0x00c6 -> LATIN SMALL LETTER A WITH TILDE
'\xc3' # 0x00c7 -> LATIN CAPITAL LETTER A WITH TILDE
'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
'\xa4' # 0x00cf -> CURRENCY SIGN
'\xf0' # 0x00d0 -> LATIN SMALL LETTER ETH
'\xd0' # 0x00d1 -> LATIN CAPITAL LETTER ETH
'\xca' # 0x00d2 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xcb' # 0x00d3 -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\xc8' # 0x00d4 -> LATIN CAPITAL LETTER E WITH GRAVE
'\u20ac' # 0x00d5 -> EURO SIGN
'\xcd' # 0x00d6 -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0x00d7 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0x00d8 -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
'\u2588' # 0x00db -> FULL BLOCK
'\u2584' # 0x00dc -> LOWER HALF BLOCK
'\xa6' # 0x00dd -> BROKEN BAR
'\xcc' # 0x00de -> LATIN CAPITAL LETTER I WITH GRAVE
'\u2580' # 0x00df -> UPPER HALF BLOCK
'\xd3' # 0x00e0 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S
'\xd4' # 0x00e2 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd2' # 0x00e3 -> LATIN CAPITAL LETTER O WITH GRAVE
'\xf5' # 0x00e4 -> LATIN SMALL LETTER O WITH TILDE
'\xd5' # 0x00e5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xb5' # 0x00e6 -> MICRO SIGN
'\xfe' # 0x00e7 -> LATIN SMALL LETTER THORN
'\xde' # 0x00e8 -> LATIN CAPITAL LETTER THORN
'\xda' # 0x00e9 -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0x00ea -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xd9' # 0x00eb -> LATIN CAPITAL LETTER U WITH GRAVE
'\xfd' # 0x00ec -> LATIN SMALL LETTER Y WITH ACUTE
'\xdd' # 0x00ed -> LATIN CAPITAL LETTER Y WITH ACUTE
'\xaf' # 0x00ee -> MACRON
'\xb4' # 0x00ef -> ACUTE ACCENT
'\xad' # 0x00f0 -> SOFT HYPHEN
'\xb1' # 0x00f1 -> PLUS-MINUS SIGN
'\u2017' # 0x00f2 -> DOUBLE LOW LINE
'\xbe' # 0x00f3 -> VULGAR FRACTION THREE QUARTERS
'\xb6' # 0x00f4 -> PILCROW SIGN
'\xa7' # 0x00f5 -> SECTION SIGN
'\xf7' # 0x00f6 -> DIVISION SIGN
'\xb8' # 0x00f7 -> CEDILLA
'\xb0' # 0x00f8 -> DEGREE SIGN
'\xa8' # 0x00f9 -> DIAERESIS
'\xb7' # 0x00fa -> MIDDLE DOT
'\xb9' # 0x00fb -> SUPERSCRIPT ONE
'\xb3' # 0x00fc -> SUPERSCRIPT THREE
'\xb2' # 0x00fd -> SUPERSCRIPT TWO
'\u25a0' # 0x00fe -> BLACK SQUARE
'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # NULL
0x0001: 0x0001, # START OF HEADING
0x0002: 0x0002, # START OF TEXT
0x0003: 0x0003, # END OF TEXT
0x0004: 0x0004, # END OF TRANSMISSION
0x0005: 0x0005, # ENQUIRY
0x0006: 0x0006, # ACKNOWLEDGE
0x0007: 0x0007, # BELL
0x0008: 0x0008, # BACKSPACE
0x0009: 0x0009, # HORIZONTAL TABULATION
0x000a: 0x000a, # LINE FEED
0x000b: 0x000b, # VERTICAL TABULATION
0x000c: 0x000c, # FORM FEED
0x000d: 0x000d, # CARRIAGE RETURN
0x000e: 0x000e, # SHIFT OUT
0x000f: 0x000f, # SHIFT IN
0x0010: 0x0010, # DATA LINK ESCAPE
0x0011: 0x0011, # DEVICE CONTROL ONE
0x0012: 0x0012, # DEVICE CONTROL TWO
0x0013: 0x0013, # DEVICE CONTROL THREE
0x0014: 0x0014, # DEVICE CONTROL FOUR
0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
0x0016: 0x0016, # SYNCHRONOUS IDLE
0x0017: 0x0017, # END OF TRANSMISSION BLOCK
0x0018: 0x0018, # CANCEL
0x0019: 0x0019, # END OF MEDIUM
0x001a: 0x001a, # SUBSTITUTE
0x001b: 0x001b, # ESCAPE
0x001c: 0x001c, # FILE SEPARATOR
0x001d: 0x001d, # GROUP SEPARATOR
0x001e: 0x001e, # RECORD SEPARATOR
0x001f: 0x001f, # UNIT SEPARATOR
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0025: 0x0025, # PERCENT SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x005c, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # DELETE
0x00a0: 0x00ff, # NO-BREAK SPACE
0x00a1: 0x00ad, # INVERTED EXCLAMATION MARK
0x00a2: 0x00bd, # CENT SIGN
0x00a3: 0x009c, # POUND SIGN
0x00a4: 0x00cf, # CURRENCY SIGN
0x00a5: 0x00be, # YEN SIGN
0x00a6: 0x00dd, # BROKEN BAR
0x00a7: 0x00f5, # SECTION SIGN
0x00a8: 0x00f9, # DIAERESIS
0x00a9: 0x00b8, # COPYRIGHT SIGN
0x00aa: 0x00a6, # FEMININE ORDINAL INDICATOR
0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00ac: 0x00aa, # NOT SIGN
0x00ad: 0x00f0, # SOFT HYPHEN
0x00ae: 0x00a9, # REGISTERED SIGN
0x00af: 0x00ee, # MACRON
0x00b0: 0x00f8, # DEGREE SIGN
0x00b1: 0x00f1, # PLUS-MINUS SIGN
0x00b2: 0x00fd, # SUPERSCRIPT TWO
0x00b3: 0x00fc, # SUPERSCRIPT THREE
0x00b4: 0x00ef, # ACUTE ACCENT
0x00b5: 0x00e6, # MICRO SIGN
0x00b6: 0x00f4, # PILCROW SIGN
0x00b7: 0x00fa, # MIDDLE DOT
0x00b8: 0x00f7, # CEDILLA
0x00b9: 0x00fb, # SUPERSCRIPT ONE
0x00ba: 0x00a7, # MASCULINE ORDINAL INDICATOR
0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER
0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF
0x00be: 0x00f3, # VULGAR FRACTION THREE QUARTERS
0x00bf: 0x00a8, # INVERTED QUESTION MARK
0x00c0: 0x00b7, # LATIN CAPITAL LETTER A WITH GRAVE
0x00c1: 0x00b5, # LATIN CAPITAL LETTER A WITH ACUTE
0x00c2: 0x00b6, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
0x00c3: 0x00c7, # LATIN CAPITAL LETTER A WITH TILDE
0x00c4: 0x008e, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x00c5: 0x008f, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x00c6: 0x0092, # LATIN CAPITAL LIGATURE AE
0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA
0x00c8: 0x00d4, # LATIN CAPITAL LETTER E WITH GRAVE
0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
0x00ca: 0x00d2, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
0x00cb: 0x00d3, # LATIN CAPITAL LETTER E WITH DIAERESIS
0x00cc: 0x00de, # LATIN CAPITAL LETTER I WITH GRAVE
0x00cd: 0x00d6, # LATIN CAPITAL LETTER I WITH ACUTE
0x00ce: 0x00d7, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
0x00cf: 0x00d8, # LATIN CAPITAL LETTER I WITH DIAERESIS
0x00d0: 0x00d1, # LATIN CAPITAL LETTER ETH
0x00d1: 0x00a5, # LATIN CAPITAL LETTER N WITH TILDE
0x00d2: 0x00e3, # LATIN CAPITAL LETTER O WITH GRAVE
0x00d3: 0x00e0, # LATIN CAPITAL LETTER O WITH ACUTE
0x00d4: 0x00e2, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00d5: 0x00e5, # LATIN CAPITAL LETTER O WITH TILDE
0x00d6: 0x0099, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x00d7: 0x009e, # MULTIPLICATION SIGN
0x00d8: 0x009d, # LATIN CAPITAL LETTER O WITH STROKE
0x00d9: 0x00eb, # LATIN CAPITAL LETTER U WITH GRAVE
0x00da: 0x00e9, # LATIN CAPITAL LETTER U WITH ACUTE
0x00db: 0x00ea, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x00dd: 0x00ed, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00de: 0x00e8, # LATIN CAPITAL LETTER THORN
0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S
0x00e0: 0x0085, # LATIN SMALL LETTER A WITH GRAVE
0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE
0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e3: 0x00c6, # LATIN SMALL LETTER A WITH TILDE
0x00e4: 0x0084, # LATIN SMALL LETTER A WITH DIAERESIS
0x00e5: 0x0086, # LATIN SMALL LETTER A WITH RING ABOVE
0x00e6: 0x0091, # LATIN SMALL LIGATURE AE
0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA
0x00e8: 0x008a, # LATIN SMALL LETTER E WITH GRAVE
0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
0x00ea: 0x0088, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x00eb: 0x0089, # LATIN SMALL LETTER E WITH DIAERESIS
0x00ec: 0x008d, # LATIN SMALL LETTER I WITH GRAVE
0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE
0x00ee: 0x008c, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x00ef: 0x008b, # LATIN SMALL LETTER I WITH DIAERESIS
0x00f0: 0x00d0, # LATIN SMALL LETTER ETH
0x00f1: 0x00a4, # LATIN SMALL LETTER N WITH TILDE
0x00f2: 0x0095, # LATIN SMALL LETTER O WITH GRAVE
0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f5: 0x00e4, # LATIN SMALL LETTER O WITH TILDE
0x00f6: 0x0094, # LATIN SMALL LETTER O WITH DIAERESIS
0x00f7: 0x00f6, # DIVISION SIGN
0x00f8: 0x009b, # LATIN SMALL LETTER O WITH STROKE
0x00f9: 0x0097, # LATIN SMALL LETTER U WITH GRAVE
0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE
0x00fb: 0x0096, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
0x00fd: 0x00ec, # LATIN SMALL LETTER Y WITH ACUTE
0x00fe: 0x00e7, # LATIN SMALL LETTER THORN
0x00ff: 0x0098, # LATIN SMALL LETTER Y WITH DIAERESIS
0x20ac: 0x00d5, # EURO SIGN
0x0192: 0x009f, # LATIN SMALL LETTER F WITH HOOK
0x2017: 0x00f2, # DOUBLE LOW LINE
0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x2580: 0x00df, # UPPER HALF BLOCK
0x2584: 0x00dc, # LOWER HALF BLOCK
0x2588: 0x00db, # FULL BLOCK
0x2591: 0x00b0, # LIGHT SHADE
0x2592: 0x00b1, # MEDIUM SHADE
0x2593: 0x00b2, # DARK SHADE
0x25a0: 0x00fe, # BLACK SQUARE
}
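# A minimal usage sketch, not part of the original module. It assumes the
# decode table above is bound to the conventional name 'decoding_table'
# (defined earlier in this file) and exercises both directions through the
# stdlib charmap machinery.
if __name__ == '__main__':
    import codecs
    # EURO SIGN encodes to byte 0xd5 via encoding_map above...
    encoded, length = codecs.charmap_encode('\u20ac', 'strict', encoding_map)
    assert encoded == b'\xd5' and length == 1
    # ...and byte 0xd5 decodes back through the decoding table.
    decoded, length = codecs.charmap_decode(b'\xd5', 'strict', decoding_table)
    assert decoded == '\u20ac'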
| lgpl-3.0 |
OSSystems/lava-server | dashboard_app/migrations/0003_add_index_HardwareDevice_device_type.py | 1 | 13377 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding index on 'HardwareDevice', fields ['device_type', 'id']
db.create_index('dashboard_app_hardwaredevice', ['device_type', 'id'])
def backwards(self, orm):
# Removing index on 'HardwareDevice', fields ['device_type', 'id']
db.delete_index('dashboard_app_hardwaredevice', ['device_type', 'id'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'dashboard_app.attachment': {
'Meta': {'object_name': 'Attachment'},
'content': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True'}),
'content_filename': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mime_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'public_url': ('django.db.models.fields.URLField', [], {'max_length': '512', 'blank': 'True'})
},
'dashboard_app.bundle': {
'Meta': {'ordering': "['-uploaded_on']", 'object_name': 'Bundle'},
'bundle_stream': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bundles'", 'to': "orm['dashboard_app.BundleStream']"}),
'content': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True'}),
'content_filename': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'content_sha1': ('django.db.models.fields.CharField', [], {'max_length': '40', 'unique': 'True', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_deserialized': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'uploaded_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'uploaded_bundles'", 'null': 'True', 'to': "orm['auth.User']"}),
'uploaded_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.utcnow'})
},
'dashboard_app.bundledeserializationerror': {
'Meta': {'object_name': 'BundleDeserializationError'},
'bundle': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'deserialization_error'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['dashboard_app.Bundle']"}),
'error_message': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'traceback': ('django.db.models.fields.TextField', [], {'max_length': '32768'})
},
'dashboard_app.bundlestream': {
'Meta': {'object_name': 'BundleStream'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'pathname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'dashboard_app.hardwaredevice': {
'Meta': {'object_name': 'HardwareDevice'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'device_type': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'dashboard_app.namedattribute': {
'Meta': {'unique_together': "(('object_id', 'name'),)", 'object_name': 'NamedAttribute'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '512'})
},
'dashboard_app.softwarepackage': {
'Meta': {'unique_together': "(('name', 'version'),)", 'object_name': 'SoftwarePackage'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'dashboard_app.softwaresource': {
'Meta': {'object_name': 'SoftwareSource'},
'branch_revision': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'branch_url': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'branch_vcs': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'commit_timestamp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project_name': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
'dashboard_app.test': {
'Meta': {'object_name': 'Test'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'test_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'})
},
'dashboard_app.testcase': {
'Meta': {'unique_together': "(('test', 'test_case_id'),)", 'object_name': 'TestCase'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'test': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'test_cases'", 'to': "orm['dashboard_app.Test']"}),
'test_case_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'units': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
'dashboard_app.testresult': {
'Meta': {'ordering': "('_order',)", 'object_name': 'TestResult'},
'_order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lineno': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'measurement': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '10', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'microseconds': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'relative_index': ('django.db.models.fields.PositiveIntegerField', [], {}),
'result': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'test_case': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'test_results'", 'null': 'True', 'to': "orm['dashboard_app.TestCase']"}),
'test_run': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'test_results'", 'to': "orm['dashboard_app.TestRun']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'dashboard_app.testrun': {
'Meta': {'ordering': "['-import_assigned_date']", 'object_name': 'TestRun'},
'analyzer_assigned_date': ('django.db.models.fields.DateTimeField', [], {}),
'analyzer_assigned_uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36'}),
'bundle': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'test_runs'", 'to': "orm['dashboard_app.Bundle']"}),
'devices': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'test_runs'", 'blank': 'True', 'to': "orm['dashboard_app.HardwareDevice']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'import_assigned_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'packages': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'test_runs'", 'blank': 'True', 'to': "orm['dashboard_app.SoftwarePackage']"}),
'sources': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'test_runs'", 'blank': 'True', 'to': "orm['dashboard_app.SoftwareSource']"}),
'sw_image_desc': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'test': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'test_runs'", 'to': "orm['dashboard_app.Test']"}),
'time_check_performed': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
}
}
complete_apps = ['dashboard_app']
| agpl-3.0 |
zvoase/twactor | twactor/cache.py | 1 | 17379 | # -*- coding:utf-8 -*-
# twactor.cache - Cache framework for twactor.
import operator
import time
try:
import threading
except:
import dummy_threading as threading
from twactor import connection, function_sync, propertyfix
class CachedMetaclass(type):
"""Metaclass for subclasses of ``CachedObject``."""
def __new__(cls, name, bases, attrs):
# Fix _update_cache
update_cache = attrs.get('_update_cache', lambda *args, **kwargs: None)
def fixed_update_cache(self, *args, **kwargs):
val = update_cache(self, *args, **kwargs)
if hasattr(bases[-1], '_update_cache'):
bases[-1]._update_cache(self, *args, **kwargs)
return val
attrs['_update_cache'] = function_sync(update_cache, fixed_update_cache)
# Fix __init__
init = attrs.get('__init__', lambda *args, **kwargs: None)
def fixed_init(self, *args, **kwargs):
if hasattr(bases[-1], '__init__') and bases[-1] is not object:
bases[-1].__init__(self, *args, **kwargs)
init(self, *args, **kwargs)
attrs['__init__'] = function_sync(init, fixed_init)
return type.__new__(cls, name, bases, attrs)
class CachedObject(object):
"""Superclass for cached objects."""
__metaclass__ = CachedMetaclass
_connection_broker = connection.DEFAULT_CB
def __init__(self, *args, **kwargs):
self._cache = kwargs.pop('cache', {})
        self._updated = kwargs.pop('updated', {'__count': 0, '__time': 0})
def _update_cache(self, *args, **kwargs):
self._updated['__count'] = self._updated.get('__count', 0) + 1
self._updated['__time'] = time.time()
def _with_connection_broker(self, cb):
copy = self._copy()
copy._connection_broker = cb
return copy
def _copy(self):
return type(self)(self._cache.get('id', None), cache=self._cache.copy(),
updated=self._updated.copy())
class CachedMirror(object):
"""Superclass for objects which rely on another object's cache."""
def __init__(self, mirrored_object):
setattr(self, self._mirrored_attribute, mirrored_object)
self._mirrored = mirrored_object
def mirror_attribute(attribute):
"""Shortcut for mirroring an attribute on another object."""
def attr_methods():
def fget(self):
return reduce(getattr, attribute.split('.'), self)
            def fset(self, value):
                setattr(reduce(getattr, attribute.split('.')[:-1], self),
                    attribute.split('.')[-1], value)
            def fdel(self):
                delattr(reduce(getattr, attribute.split('.')[:-1], self),
                    attribute.split('.')[-1])
return {'fget': fget, 'fset': fset, 'fdel': fdel}
return property(**attr_methods())
_cache = mirror_attribute('_mirrored._cache')
_update_cache = mirror_attribute('_mirrored._update_cache')
_updated = mirror_attribute('_mirrored._updated')
del mirror_attribute
class CachedListMetaclass(type):
def __new__(cls, name, bases, attrs):
# Fix __init__
init = attrs.get('__init__', lambda *args, **kwargs: None)
def fixed_init(self, *args, **kwargs):
for base in reversed(bases):
if base is object:
break
base.__init__(self, *args, **kwargs)
init(self, *args, **kwargs)
attrs['__init__'] = function_sync(init, fixed_init)
# Fix _update_cache
update_cache = attrs.get('_update_cache', None)
if not update_cache:
for base in reversed(bases):
if hasattr(base, '_update_cache'):
update_cache = base._update_cache
break
if update_cache:
def fixed_update_cache(self, *args, **kwargs):
data = update_cache(self, *args, **kwargs)
for base in reversed(bases):
if hasattr(base, '_insert_into_cache'):
base._insert_into_cache(self, data)
break
attrs['_update_cache'] = function_sync(update_cache,
fixed_update_cache)
return type.__new__(cls, name, bases, attrs)
class CachedList(object):
__metaclass__ = CachedListMetaclass
_connection_broker = connection.DEFAULT_CB
_sort_attrs = ('created', 'id')
_reverse_class = None
OBJ_CLASS = lambda cache: cache
UPDATE_INTERVAL = 60 * 3 # Three-minute update interval by default.
def __init__(self, *args, **kwargs):
self._cache = kwargs.pop('cache', [])
self._object_cache = kwargs.pop('object_cache', {})
self._updated = kwargs.pop('updated', {'__count': 0, '__time': 0})
self.update_monitor = CachedListUpdateMonitorThread(self)
def __getitem__(self, pos_or_slice):
if isinstance(pos_or_slice, (int, long)):
return self._cache_to_obj(
self._cache[self._resolve_cache_index(pos_or_slice)])
start, stop, step = [getattr(pos_or_slice, attr)
for attr in ('start', 'stop', 'step')]
start = self._resolve_cache_index(start, start=True)
stop = self._resolve_cache_index(stop, start=False)
new_cache = map(self._cache.__getitem__, range(start, stop, step or 1))
new_updated = {'__count': self._updated['__count'],
'__time': self._updated['__time']}
for item in new_cache:
count_key = '%s__count' % (item.get('id', repr(item)))
time_key = '%s__time' % (item.get('id', repr(item)))
new_updated[count_key] = self._updated.get(count_key, None)
new_updated[time_key] = self._updated.get(time_key, None)
return type(self)(
cache=new_cache, updated=new_updated)._with_connection_broker(
self._connection_broker)
def __delitem__(self, pos_or_slice):
raise NotImplementedError
def __iter__(self):
for item in self._cache:
yield self._cache_to_obj(item)
def __reversed__(self):
raise NotImplementedError
def __contains__(self, obj):
if not isinstance(obj, self.OBJ_CLASS):
return False
return obj.id in (obj2.id for obj2 in self._objects)
def __len__(self):
raise NotImplementedError
def _cache_to_obj(self, cache_item):
if 'id' in cache_item and cache_item['id'] in self._object_cache:
obj = self._object_cache[cache_item['id']]
elif 'id' in cache_item and cache_item['id'] not in self._object_cache:
obj = self.OBJ_CLASS(cache_item['id'], cache=cache_item)
self._object_cache[cache_item['id']] = obj
else:
obj = self.OBJ_CLASS(None, cache=cache_item)
self._object_cache[repr(obj)] = obj
if hasattr(obj, '_with_connection_broker'):
return obj._with_connection_broker(self._connection_broker)
return obj
def _clean_object_cache(self):
obj_cache_ids = self._object_cache.keys()
data_cache_ids = map(operator.attrgetter('id'), self._objects)
for obj_id in obj_cache_ids:
if obj_id not in data_cache_ids:
                del self._object_cache[obj_id]
def _copy(self):
copy = type(self)(cache=self._cache[:],
updated=self._updated.copy())
copy._connection_broker = self._connection_broker
return copy
@property
def _objects(self):
return map(self._cache_to_obj, self._cache)
def _resolve_cache_index(self, index, start=True):
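        # For a negative index, keep updating the cache until its length
        # stops growing, so the offset can be taken from the true end of the
        # remote list; for an index beyond the current cache, keep updating
        # until enough items arrive, raising IndexError (as a list would)
        # once an update yields nothing new.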
if index < 0:
old_length, length = None, len(self._cache)
while (old_length != length):
old_length = length
self._update_cache()
length = len(self._cache)
if abs(index) <= length:
return length + index
raise IndexError('list index out of range')
elif (not index) and (index != 0):
return 0 if start else (len(self._cache) - 1)
elif index < len(self._cache):
return index
old_length, length = None, len(self._cache)
while (index >= length) and (old_length != length):
old_length = length
self._update_cache()
length = len(self._cache)
if old_length == length:
raise IndexError('list index out of range')
return index
def _sort_key(self, item):
return operator.attrgetter(*self._sort_attrs)(item)
def _with_connection_broker(self, connection_broker):
copy = self._copy()
copy._connection_broker = connection_broker
return copy
class CachedListUpdateMonitorThread(threading.Thread):
def __init__(self, object, *args, **kwargs):
super(CachedListUpdateMonitorThread, self).__init__(
*args, **kwargs)
self.object = object
self.kill_flag = False
def run(self):
while not self.kill_flag:
self.object._update_cache()
time.sleep(self.object.UPDATE_INTERVAL)
self.kill_flag = False
def stop(self):
self.kill_flag = True
class ForwardCachedList(CachedList):
def _insert_into_cache(self, fetched_data):
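        # Fetched items are sorted ascending by sort key and appended only
        # when they are newer than the newest cached item, so repeated
        # fetches do not duplicate entries; per-item and global update
        # counters and timestamps are refreshed as a side effect.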
if not fetched_data:
self._updated['__count'] = self._updated.get('__count', 0) + 1
self._updated['__time'] = time.time()
return
fetched_objects = zip(fetched_data,
map(self._cache_to_obj, fetched_data))
sorted_objects = sorted(fetched_objects,
key=lambda pair: self._sort_key(pair[1]))
timestamp = time.time()
if not self._cache:
for data, object in sorted_objects:
count_key = '%s__count' % (getattr(object, 'id', repr(object)),)
time_key = '%s__time' % (getattr(object, 'id', repr(object)),)
self._updated[count_key] = self._updated.get(count_key, 0) + 1
self._updated[time_key] = timestamp
self._cache.extend(pair[0] for pair in sorted_objects)
else:
latest_key = self._sort_key(self._cache_to_obj(self._cache[-1]))
add_to_cache = self._sort_key(sorted_objects[0][1]) > latest_key
for data, object in sorted_objects:
count_key = '%s__count' % (getattr(object, 'id', repr(object)),)
time_key = '%s__time' % (getattr(object, 'id', repr(object)),)
self._updated[count_key] = self._updated.get(count_key, 0) + 1
self._updated[time_key] = timestamp
if add_to_cache or (self._sort_key(object) > latest_key):
self._cache.append(data)
if self._sort_key(object) >= latest_key:
add_to_cache = True
self._updated['__count'] = self._updated.get('__count', 0) + 1
self._updated['__time'] = time.time()
self._clean_object_cache()
class ReverseCachedList(CachedList):
def _insert_into_cache(self, fetched_data):
if not fetched_data:
self._updated['__count'] = self._updated.get('__count', 0) + 1
self._updated['__time'] = time.time()
return
fetched_objects = zip(fetched_data,
map(self._cache_to_obj, fetched_data))
sorted_objects = sorted(fetched_objects, reverse=True,
key=lambda pair: self._sort_key(pair[1]))
timestamp = time.time()
if not self._cache:
for data, object in sorted_objects:
count_key = '%s__count' % (getattr(object, 'id', repr(object)),)
time_key = '%s__time' % (getattr(object, 'id', repr(object)),)
self._updated[count_key] = self._updated.get(count_key, 0) + 1
self._updated[time_key] = timestamp
self._cache.extend(pair[0] for pair in sorted_objects)
else:
latest_key = self._sort_key(self._cache_to_obj(self._cache[-1]))
add_to_cache = self._sort_key(sorted_objects[0][1]) < latest_key
for data, object in sorted_objects:
count_key = '%s__count' % (getattr(object, 'id', repr(object)),)
time_key = '%s__time' % (getattr(object, 'id', repr(object)),)
self._updated[count_key] = self._updated.get(count_key, 0) + 1
self._updated[time_key] = timestamp
if add_to_cache or (self._sort_key(object) < latest_key):
self._cache.append(data)
if self._sort_key(object) <= latest_key:
add_to_cache = True
self._updated['__count'] = self._updated.get('__count', 0) + 1
self._updated['__time'] = time.time()
self._clean_object_cache()
def update_once(method):
"""
Make sure the cache has been updated at least once before calling a method.
This should be used as a decorator, and it wraps a method on a cached object
to make sure that the object's cache has been updated at least once before
the method is called. This allows you to implement lazy evaluation, which
is especially useful when fetching data over the network.
"""
def wrapper(self, *args, **kwargs):
if not self._updated.get('__count', 0):
self._update_cache()
self._updated['__count'] = self._updated.get('__count', 0) + 1
return method(self, *args, **kwargs)
return function_sync(method, wrapper)
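# A hypothetical usage sketch, not part of twactor: _LazyProfile and its
# fake fetch are invented for illustration. The cache is populated only the
# first time 'name' is read, giving the lazy evaluation described above.
class _LazyProfile(CachedObject):
    def _update_cache(self):
        self._cache['name'] = 'example'  # stand-in for a network fetch
    @property
    @update_once
    def name(self):
        return self._cache['name']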
def update_on_key(key, always=False):
"""
Make sure the cache has a particular key present before calling a method.
This decorator accepts a key which it will look up in the cache before
calling the wrapped method. If the cache doesn't have the key, it will
    perform an update before calling the method. Note that it does not keep
    updating the cache until the key is present, since doing so could result
    in a non-terminating loop.
You may also pass the decorator an additional keyword, ``always``, which
will tell it whether or not to keep checking for the key every time the
method is called. By default, this is ``False``, which means that the key
will be checked only the first time the method is called. If set to true,
the key will be checked *every* time the method is called.
"""
def wrapper_deco(method):
def wrapper(self, *args, **kwargs):
if always:
if key not in self._cache:
self._update_cache()
return method(self, *args, **kwargs)
elif (key not in self._cache and
(not self._updated.get('key__' + key, False))):
self._update_cache()
self._updated['key__' + key] = True
return method(self, *args, **kwargs)
return function_sync(method, wrapper)
return wrapper_deco
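# A hypothetical usage sketch, not part of twactor: _KeyedProfile and the
# 'location' key are invented for illustration. The cache is refreshed once
# if the key is absent; passing always=True would re-check on every access.
class _KeyedProfile(CachedObject):
    def _update_cache(self):
        self._cache['location'] = 'unknown'  # stand-in for a network fetch
    @property
    @update_on_key('location')
    def location(self):
        return self._cache['location']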
def update_on_time(length):
"""
    Update the cache before calling a method if a given time has elapsed.
This decorator accepts a length of time in seconds, and will wrap a method
with a cache-checker. Every time the method is called, the wrapper will
check to see that a certain amount of time has passed. If the time that has
passed is greater than or equal to the specified length, the cache is
updated. Finally, the method is called.
"""
def wrapper_deco(method):
def wrapper(self, *args, **kwargs):
if (time.time() - self._updated.get('__time', 0)) >= length:
self._update_cache()
self._updated['__time'] = time.time()
return method(self, *args, **kwargs)
return function_sync(method, wrapper)
return wrapper_deco
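# A hypothetical usage sketch, not part of twactor: _TimedProfile is
# invented for illustration. With a 60-second length, reads more than a
# minute after the last update trigger a refresh before returning.
class _TimedProfile(CachedObject):
    def _update_cache(self):
        self._cache['status'] = 'ok'  # stand-in for a network fetch
    @property
    @update_on_time(60)
    def status(self):
        return self._cache['status']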
def update_on_count(num):
"""
Update the cache if a method has been called a certain number of times.
This decorator accepts a number, and keeps track of how many times the
method it is wrapping has been called. When the number of calls reaches this
number, the cache is updated.
"""
def wrapper_deco(method):
def wrapper(self, *args, **kwargs):
if self._updated.get('count__' + method.__name__, num) == num:
self._update_cache()
self._updated['count__' + method.__name__] = 1
else:
                self._updated['count__' + method.__name__] = self._updated.get(
                    'count__' + method.__name__, 0) + 1
return method(self, *args, **kwargs)
return function_sync(method, wrapper)
return wrapper_deco
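# A hypothetical usage sketch, not part of twactor: _CountedProfile is
# invented for illustration. The cache is refreshed on the first read of
# 'counter' and then again after every tenth subsequent read.
class _CountedProfile(CachedObject):
    def _update_cache(self):
        self._cache['counter'] = 0  # stand-in for a network fetch
    @property
    @update_on_count(10)
    def counter(self):
        return self._cache['counter']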
def simple_map(key):
"""
    Shortcut for a typical caching use-case.
This is a shortcut for the following pattern::
class SomeCachedObject(CachedObject):
@property
@update_on_key(key_name)
def attrname(self):
return self._cache[key_name]
Instead you can do this::
class SomeCachedObject(CachedObject):
attrname = simple_map(key_name)
"""
    return property(update_on_key(key)(lambda self: self._cache[key]))
| mit |
jyotsna1820/django | tests/admin_widgets/widgetadmin.py | 68 | 1222 | from django.contrib import admin
from . import models
class WidgetAdmin(admin.AdminSite):
pass
class CarAdmin(admin.ModelAdmin):
list_display = ['make', 'model', 'owner']
list_editable = ['owner']
class CarTireAdmin(admin.ModelAdmin):
def formfield_for_foreignkey(self, db_field, request, **kwargs):
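        # Limit the selectable cars to those owned by the requesting user;
        # every other foreign key falls through to the default form field.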
if db_field.name == "car":
kwargs["queryset"] = models.Car.objects.filter(owner=request.user)
return db_field.formfield(**kwargs)
return super(CarTireAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
class EventAdmin(admin.ModelAdmin):
raw_id_fields = ['main_band', 'supporting_bands']
class SchoolAdmin(admin.ModelAdmin):
filter_vertical = ('students',)
filter_horizontal = ('alumni',)
site = WidgetAdmin(name='widget-admin')
site.register(models.User)
site.register(models.Car, CarAdmin)
site.register(models.CarTire, CarTireAdmin)
site.register(models.Member)
site.register(models.Band)
site.register(models.Event, EventAdmin)
site.register(models.Album)
site.register(models.Inventory)
site.register(models.Bee)
site.register(models.Advisor)
site.register(models.School, SchoolAdmin)
site.register(models.Profile)
| bsd-3-clause |
tinloaf/home-assistant | homeassistant/components/xiaomi_aqara.py | 3 | 10991 | """
Support for Xiaomi Gateways.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/xiaomi_aqara/
"""
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.components.discovery import SERVICE_XIAOMI_GW
from homeassistant.const import (
ATTR_BATTERY_LEVEL, CONF_HOST, CONF_MAC, CONF_PORT,
EVENT_HOMEASSISTANT_STOP)
from homeassistant.core import callback
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.dt import utcnow
from homeassistant.util import slugify
REQUIREMENTS = ['PyXiaomiGateway==0.11.1']
_LOGGER = logging.getLogger(__name__)
ATTR_GW_MAC = 'gw_mac'
ATTR_RINGTONE_ID = 'ringtone_id'
ATTR_RINGTONE_VOL = 'ringtone_vol'
ATTR_DEVICE_ID = 'device_id'
CONF_DISCOVERY_RETRY = 'discovery_retry'
CONF_GATEWAYS = 'gateways'
CONF_INTERFACE = 'interface'
CONF_KEY = 'key'
CONF_DISABLE = 'disable'
DOMAIN = 'xiaomi_aqara'
PY_XIAOMI_GATEWAY = "xiaomi_gw"
TIME_TILL_UNAVAILABLE = timedelta(minutes=150)
SERVICE_PLAY_RINGTONE = 'play_ringtone'
SERVICE_STOP_RINGTONE = 'stop_ringtone'
SERVICE_ADD_DEVICE = 'add_device'
SERVICE_REMOVE_DEVICE = 'remove_device'
GW_MAC = vol.All(
cv.string,
lambda value: value.replace(':', '').lower(),
vol.Length(min=12, max=12)
)
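# The validator above normalizes a MAC such as '34:ce:00:fa:30:ce' to
# '34ce00fa30ce' and then requires exactly twelve characters.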
SERVICE_SCHEMA_PLAY_RINGTONE = vol.Schema({
vol.Required(ATTR_RINGTONE_ID):
vol.All(vol.Coerce(int), vol.NotIn([9, 14, 15, 16, 17, 18, 19])),
vol.Optional(ATTR_RINGTONE_VOL):
vol.All(vol.Coerce(int), vol.Clamp(min=0, max=100))
})
SERVICE_SCHEMA_REMOVE_DEVICE = vol.Schema({
vol.Required(ATTR_DEVICE_ID):
vol.All(cv.string, vol.Length(min=14, max=14))
})
GATEWAY_CONFIG = vol.Schema({
vol.Optional(CONF_MAC, default=None): vol.Any(GW_MAC, None),
vol.Optional(CONF_KEY):
vol.All(cv.string, vol.Length(min=16, max=16)),
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=9898): cv.port,
vol.Optional(CONF_DISABLE, default=False): cv.boolean,
})
def _fix_conf_defaults(config):
"""Update some configuration defaults."""
config['sid'] = config.pop(CONF_MAC, None)
if config.get(CONF_KEY) is None:
_LOGGER.warning(
'Key is not provided for gateway %s. Controlling the gateway '
'will not be possible', config['sid'])
if config.get(CONF_HOST) is None:
config.pop(CONF_PORT)
return config
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(CONF_GATEWAYS, default={}):
vol.All(cv.ensure_list, [GATEWAY_CONFIG], [_fix_conf_defaults]),
vol.Optional(CONF_INTERFACE, default='any'): cv.string,
vol.Optional(CONF_DISCOVERY_RETRY, default=3): cv.positive_int
})
}, extra=vol.ALLOW_EXTRA)
def setup(hass, config):
"""Set up the Xiaomi component."""
gateways = []
interface = 'any'
discovery_retry = 3
if DOMAIN in config:
gateways = config[DOMAIN][CONF_GATEWAYS]
interface = config[DOMAIN][CONF_INTERFACE]
discovery_retry = config[DOMAIN][CONF_DISCOVERY_RETRY]
async def xiaomi_gw_discovered(service, discovery_info):
"""Perform action when Xiaomi Gateway device(s) has been found."""
# We don't need to do anything here, the purpose of Home Assistant's
# discovery service is to just trigger loading of this
# component, and then its own discovery process kicks in.
discovery.listen(hass, SERVICE_XIAOMI_GW, xiaomi_gw_discovered)
from xiaomi_gateway import XiaomiGatewayDiscovery
xiaomi = hass.data[PY_XIAOMI_GATEWAY] = XiaomiGatewayDiscovery(
hass.add_job, gateways, interface)
_LOGGER.debug("Expecting %s gateways", len(gateways))
for k in range(discovery_retry):
_LOGGER.info("Discovering Xiaomi Gateways (Try %s)", k + 1)
xiaomi.discover_gateways()
if len(xiaomi.gateways) >= len(gateways):
break
if not xiaomi.gateways:
_LOGGER.error("No gateway discovered")
return False
xiaomi.listen()
_LOGGER.debug("Gateways discovered. Listening for broadcasts")
for component in ['binary_sensor', 'sensor', 'switch', 'light', 'cover',
'lock']:
discovery.load_platform(hass, component, DOMAIN, {}, config)
def stop_xiaomi(event):
"""Stop Xiaomi Socket."""
_LOGGER.info("Shutting down Xiaomi Hub")
xiaomi.stop_listen()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_xiaomi)
def play_ringtone_service(call):
"""Service to play ringtone through Gateway."""
ring_id = call.data.get(ATTR_RINGTONE_ID)
gateway = call.data.get(ATTR_GW_MAC)
kwargs = {'mid': ring_id}
ring_vol = call.data.get(ATTR_RINGTONE_VOL)
if ring_vol is not None:
kwargs['vol'] = ring_vol
gateway.write_to_hub(gateway.sid, **kwargs)
def stop_ringtone_service(call):
"""Service to stop playing ringtone on Gateway."""
gateway = call.data.get(ATTR_GW_MAC)
gateway.write_to_hub(gateway.sid, mid=10000)
def add_device_service(call):
"""Service to add a new sub-device within the next 30 seconds."""
gateway = call.data.get(ATTR_GW_MAC)
gateway.write_to_hub(gateway.sid, join_permission='yes')
hass.components.persistent_notification.async_create(
'Join permission enabled for 30 seconds! '
'Please press the pairing button of the new device once.',
title='Xiaomi Aqara Gateway')
def remove_device_service(call):
"""Service to remove a sub-device from the gateway."""
device_id = call.data.get(ATTR_DEVICE_ID)
gateway = call.data.get(ATTR_GW_MAC)
gateway.write_to_hub(gateway.sid, remove_device=device_id)
gateway_only_schema = _add_gateway_to_schema(xiaomi, vol.Schema({}))
hass.services.register(
DOMAIN, SERVICE_PLAY_RINGTONE, play_ringtone_service,
schema=_add_gateway_to_schema(xiaomi, SERVICE_SCHEMA_PLAY_RINGTONE))
hass.services.register(
DOMAIN, SERVICE_STOP_RINGTONE, stop_ringtone_service,
schema=gateway_only_schema)
hass.services.register(
DOMAIN, SERVICE_ADD_DEVICE, add_device_service,
schema=gateway_only_schema)
hass.services.register(
DOMAIN, SERVICE_REMOVE_DEVICE, remove_device_service,
schema=_add_gateway_to_schema(xiaomi, SERVICE_SCHEMA_REMOVE_DEVICE))
return True
class XiaomiDevice(Entity):
"""Representation a base Xiaomi device."""
def __init__(self, device, device_type, xiaomi_hub):
"""Initialize the Xiaomi device."""
self._state = None
self._is_available = True
self._sid = device['sid']
self._name = '{}_{}'.format(device_type, self._sid)
self._type = device_type
self._write_to_hub = xiaomi_hub.write_to_hub
self._get_from_hub = xiaomi_hub.get_from_hub
self._device_state_attributes = {}
self._remove_unavailability_tracker = None
self._xiaomi_hub = xiaomi_hub
self.parse_data(device['data'], device['raw_data'])
self.parse_voltage(device['data'])
if hasattr(self, '_data_key') \
and self._data_key: # pylint: disable=no-member
self._unique_id = slugify("{}-{}".format(
self._data_key, # pylint: disable=no-member
self._sid))
else:
self._unique_id = slugify("{}-{}".format(self._type, self._sid))
def _add_push_data_job(self, *args):
self.hass.add_job(self.push_data, *args)
async def async_added_to_hass(self):
"""Start unavailability tracking."""
self._xiaomi_hub.callbacks[self._sid].append(self._add_push_data_job)
self._async_track_unavailable()
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self._unique_id
@property
def available(self):
"""Return True if entity is available."""
return self._is_available
@property
def should_poll(self):
"""Return the polling state. No polling needed."""
return False
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._device_state_attributes
@callback
def _async_set_unavailable(self, now):
"""Set state to UNAVAILABLE."""
self._remove_unavailability_tracker = None
self._is_available = False
self.async_schedule_update_ha_state()
@callback
def _async_track_unavailable(self):
if self._remove_unavailability_tracker:
self._remove_unavailability_tracker()
self._remove_unavailability_tracker = async_track_point_in_utc_time(
self.hass, self._async_set_unavailable,
utcnow() + TIME_TILL_UNAVAILABLE)
if not self._is_available:
self._is_available = True
return True
return False
@callback
def push_data(self, data, raw_data):
"""Push from Hub."""
_LOGGER.debug("PUSH >> %s: %s", self, data)
was_unavailable = self._async_track_unavailable()
is_data = self.parse_data(data, raw_data)
is_voltage = self.parse_voltage(data)
if is_data or is_voltage or was_unavailable:
self.async_schedule_update_ha_state()
def parse_voltage(self, data):
"""Parse battery level data sent by gateway."""
if 'voltage' not in data:
return False
max_volt = 3300
min_volt = 2800
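        # The gateway reports battery voltage in millivolts; clamp it to the
        # 2800-3300 mV range and map it linearly to 0-100%, so e.g. a
        # reading of 3050 mV becomes 50.0%.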
voltage = data['voltage']
voltage = min(voltage, max_volt)
voltage = max(voltage, min_volt)
percent = ((voltage - min_volt) / (max_volt - min_volt)) * 100
self._device_state_attributes[ATTR_BATTERY_LEVEL] = round(percent, 1)
return True
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
raise NotImplementedError()
def _add_gateway_to_schema(xiaomi, schema):
"""Extend a voluptuous schema with a gateway validator."""
def gateway(sid):
"""Convert sid to a gateway."""
sid = str(sid).replace(':', '').lower()
for gateway in xiaomi.gateways.values():
if gateway.sid == sid:
return gateway
raise vol.Invalid('Unknown gateway sid {}'.format(sid))
gateways = list(xiaomi.gateways.values())
kwargs = {}
# If the user has only 1 gateway, make it the default for services.
if len(gateways) == 1:
kwargs['default'] = gateways[0]
return schema.extend({
vol.Required(ATTR_GW_MAC, **kwargs): gateway
})
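# A hypothetical usage sketch, not part of the component (the sid below is
# invented for illustration; Home Assistant normally runs the validator):
#     schema = _add_gateway_to_schema(xiaomi, SERVICE_SCHEMA_PLAY_RINGTONE)
#     schema({'gw_mac': '34:ce:00:fa:30:ce', 'ringtone_id': 2})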
| apache-2.0 |
Omegaphora/external_chromium_org | mojo/python/tests/system_unittest.py | 26 | 11412 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import random
import sys
import time
import unittest
# pylint: disable=F0401
import mojo.embedder
from mojo import system
DATA_SIZE = 1024
def _GetRandomBuffer(size):
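  # Seeding with the requested size makes the buffer deterministic for a
  # given size, so separately generated payloads of equal size compare equal.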
random.seed(size)
return bytearray(''.join(chr(random.randint(0, 255)) for i in xrange(size)))
class BaseMojoTest(unittest.TestCase):
def setUp(self):
mojo.embedder.Init()
class CoreTest(BaseMojoTest):
def testResults(self):
self.assertEquals(system.RESULT_OK, 0)
self.assertLess(system.RESULT_CANCELLED, 0)
self.assertLess(system.RESULT_UNKNOWN, 0)
self.assertLess(system.RESULT_INVALID_ARGUMENT, 0)
self.assertLess(system.RESULT_DEADLINE_EXCEEDED, 0)
self.assertLess(system.RESULT_NOT_FOUND, 0)
self.assertLess(system.RESULT_ALREADY_EXISTS, 0)
self.assertLess(system.RESULT_PERMISSION_DENIED, 0)
self.assertLess(system.RESULT_RESOURCE_EXHAUSTED, 0)
self.assertLess(system.RESULT_FAILED_PRECONDITION, 0)
self.assertLess(system.RESULT_ABORTED, 0)
self.assertLess(system.RESULT_OUT_OF_RANGE, 0)
self.assertLess(system.RESULT_UNIMPLEMENTED, 0)
self.assertLess(system.RESULT_INTERNAL, 0)
self.assertLess(system.RESULT_UNAVAILABLE, 0)
self.assertLess(system.RESULT_DATA_LOSS, 0)
self.assertLess(system.RESULT_BUSY, 0)
self.assertLess(system.RESULT_SHOULD_WAIT, 0)
def testConstants(self):
self.assertGreaterEqual(system.DEADLINE_INDEFINITE, 0)
self.assertGreaterEqual(system.HANDLE_SIGNAL_NONE, 0)
self.assertGreaterEqual(system.HANDLE_SIGNAL_READABLE, 0)
self.assertGreaterEqual(system.HANDLE_SIGNAL_WRITABLE, 0)
self.assertGreaterEqual(system.WRITE_MESSAGE_FLAG_NONE, 0)
self.assertGreaterEqual(system.READ_MESSAGE_FLAG_NONE, 0)
self.assertGreaterEqual(system.READ_MESSAGE_FLAG_MAY_DISCARD, 0)
self.assertGreaterEqual(system.WRITE_DATA_FLAG_NONE, 0)
self.assertGreaterEqual(system.WRITE_DATA_FLAG_ALL_OR_NONE, 0)
self.assertGreaterEqual(system.READ_DATA_FLAG_NONE, 0)
self.assertGreaterEqual(system.READ_DATA_FLAG_ALL_OR_NONE, 0)
self.assertGreaterEqual(system.READ_DATA_FLAG_DISCARD, 0)
self.assertGreaterEqual(system.READ_DATA_FLAG_QUERY, 0)
self.assertGreaterEqual(system.MAP_BUFFER_FLAG_NONE, 0)
def testGetTimeTicksNow(self):
pt1 = time.time()
v1 = system.GetTimeTicksNow()
time.sleep(1e-3)
v2 = system.GetTimeTicksNow()
pt2 = time.time()
self.assertGreater(v1, 0)
self.assertGreater(v2, v1 + 1000)
self.assertGreater(1e6 * (pt2 - pt1), v2 - v1)
def _testHandlesCreation(self, *args):
for handle in args:
self.assertTrue(handle.IsValid())
handle.Close()
self.assertFalse(handle.IsValid())
def _TestMessageHandleCreation(self, handles):
self._testHandlesCreation(handles.handle0, handles.handle1)
def testCreateMessagePipe(self):
self._TestMessageHandleCreation(system.MessagePipe())
def testCreateMessagePipeWithNoneOptions(self):
self._TestMessageHandleCreation(system.MessagePipe(None))
def testCreateMessagePipeWithOptions(self):
self._TestMessageHandleCreation(
system.MessagePipe(system.CreateMessagePipeOptions()))
def testWaitOverMessagePipe(self):
handles = system.MessagePipe()
handle = handles.handle0
self.assertEquals(system.RESULT_OK, handle.Wait(
system.HANDLE_SIGNAL_WRITABLE, system.DEADLINE_INDEFINITE))
self.assertEquals(system.RESULT_DEADLINE_EXCEEDED,
handle.Wait(system.HANDLE_SIGNAL_READABLE, 0))
handles.handle1.WriteMessage()
self.assertEquals(
system.RESULT_OK,
handle.Wait(
system.HANDLE_SIGNAL_READABLE,
system.DEADLINE_INDEFINITE))
def testWaitOverManyMessagePipe(self):
handles = system.MessagePipe()
handle0 = handles.handle0
handle1 = handles.handle1
self.assertEquals(
0,
system.WaitMany(
[(handle0, system.HANDLE_SIGNAL_WRITABLE),
(handle1, system.HANDLE_SIGNAL_WRITABLE)],
system.DEADLINE_INDEFINITE))
self.assertEquals(
system.RESULT_DEADLINE_EXCEEDED,
system.WaitMany(
[(handle0, system.HANDLE_SIGNAL_READABLE),
(handle1, system.HANDLE_SIGNAL_READABLE)], 0))
handle0.WriteMessage()
self.assertEquals(
1,
system.WaitMany(
[(handle0, system.HANDLE_SIGNAL_READABLE),
(handle1, system.HANDLE_SIGNAL_READABLE)],
system.DEADLINE_INDEFINITE))
def testSendBytesOverMessagePipe(self):
handles = system.MessagePipe()
data = _GetRandomBuffer(DATA_SIZE)
handles.handle0.WriteMessage(data)
(res, buffers, next_message) = handles.handle1.ReadMessage()
self.assertEquals(system.RESULT_RESOURCE_EXHAUSTED, res)
self.assertEquals(None, buffers)
self.assertEquals((DATA_SIZE, 0), next_message)
result = bytearray(DATA_SIZE)
(res, buffers, next_message) = handles.handle1.ReadMessage(result)
self.assertEquals(system.RESULT_OK, res)
self.assertEquals(None, next_message)
self.assertEquals((data, []), buffers)
def testSendEmptyDataOverMessagePipe(self):
handles = system.MessagePipe()
handles.handle0.WriteMessage(None)
(res, buffers, next_message) = handles.handle1.ReadMessage()
self.assertEquals(system.RESULT_OK, res)
self.assertEquals(None, next_message)
self.assertEquals((None, []), buffers)
def testSendHandleOverMessagePipe(self):
handles = system.MessagePipe()
handles_to_send = system.MessagePipe()
handles.handle0.WriteMessage(handles=[handles_to_send.handle0,
handles_to_send.handle1])
(res, buffers, next_message) = handles.handle1.ReadMessage(
max_number_of_handles=2)
self.assertFalse(handles_to_send.handle0.IsValid())
self.assertFalse(handles_to_send.handle1.IsValid())
self.assertEquals(system.RESULT_OK, res)
self.assertEquals(None, next_message)
self.assertEquals(None, buffers[0])
self.assertEquals(2, len(buffers[1]))
handles = buffers[1]
for handle in handles:
self.assertTrue(handle.IsValid())
(res, buffers, next_message) = handle.ReadMessage()
self.assertEquals(system.RESULT_SHOULD_WAIT, res)
for handle in handles:
handle.WriteMessage()
for handle in handles:
(res, buffers, next_message) = handle.ReadMessage()
self.assertEquals(system.RESULT_OK, res)
def _TestDataHandleCreation(self, handles):
self._testHandlesCreation(
handles.producer_handle, handles.consumer_handle)
def testCreateDataPipe(self):
self._TestDataHandleCreation(system.DataPipe())
def testCreateDataPipeWithNoneOptions(self):
self._TestDataHandleCreation(system.DataPipe(None))
def testCreateDataPipeWithDefaultOptions(self):
self._TestDataHandleCreation(
system.DataPipe(system.CreateDataPipeOptions()))
def testCreateDataPipeWithDiscardFlag(self):
options = system.CreateDataPipeOptions()
options.flags = system.CreateDataPipeOptions.FLAG_MAY_DISCARD
self._TestDataHandleCreation(system.DataPipe(options))
def testCreateDataPipeWithElementSize(self):
options = system.CreateDataPipeOptions()
options.element_num_bytes = 5
self._TestDataHandleCreation(system.DataPipe(options))
def testCreateDataPipeWithCapacity(self):
options = system.CreateDataPipeOptions()
options.element_capacity_num_bytes = DATA_SIZE
self._TestDataHandleCreation(system.DataPipe(options))
def testCreateDataPipeWithIncorrectParameters(self):
options = system.CreateDataPipeOptions()
options.element_num_bytes = 5
options.capacity_num_bytes = DATA_SIZE
with self.assertRaises(system.MojoException) as cm:
self._TestDataHandleCreation(system.DataPipe(options))
self.assertEquals(system.RESULT_INVALID_ARGUMENT, cm.exception.mojo_result)
def testSendEmptyDataOverDataPipe(self):
pipes = system.DataPipe()
self.assertEquals((system.RESULT_OK, 0), pipes.producer_handle.WriteData())
self.assertEquals(
(system.RESULT_OK, None), pipes.consumer_handle.ReadData())
def testSendDataOverDataPipe(self):
pipes = system.DataPipe()
data = _GetRandomBuffer(DATA_SIZE)
self.assertEquals((system.RESULT_OK, DATA_SIZE),
pipes.producer_handle.WriteData(data))
self.assertEquals((system.RESULT_OK, data),
pipes.consumer_handle.ReadData(bytearray(DATA_SIZE)))
def testTwoPhaseWriteOnDataPipe(self):
pipes = system.DataPipe()
(res, buf) = pipes.producer_handle.BeginWriteData(DATA_SIZE)
self.assertEquals(system.RESULT_OK, res)
self.assertGreaterEqual(len(buf.buffer), DATA_SIZE)
data = _GetRandomBuffer(DATA_SIZE)
buf.buffer[0:DATA_SIZE] = data
self.assertEquals(system.RESULT_OK, buf.End(DATA_SIZE))
self.assertEquals((system.RESULT_OK, data),
pipes.consumer_handle.ReadData(bytearray(DATA_SIZE)))
def testTwoPhaseReadOnDataPipe(self):
pipes = system.DataPipe()
data = _GetRandomBuffer(DATA_SIZE)
self.assertEquals((system.RESULT_OK, DATA_SIZE),
pipes.producer_handle.WriteData(data))
(res, buf) = pipes.consumer_handle.BeginReadData()
self.assertEquals(system.RESULT_OK, res)
self.assertEquals(DATA_SIZE, len(buf.buffer))
self.assertEquals(data, buf.buffer)
self.assertEquals(system.RESULT_OK, buf.End(DATA_SIZE))
def testCreateSharedBuffer(self):
self._testHandlesCreation(system.CreateSharedBuffer(DATA_SIZE))
def testCreateSharedBufferWithNoneOptions(self):
self._testHandlesCreation(system.CreateSharedBuffer(DATA_SIZE, None))
def testCreateSharedBufferWithDefaultOptions(self):
self._testHandlesCreation(
system.CreateSharedBuffer(
DATA_SIZE,
system.CreateSharedBufferOptions()))
def testDuplicateSharedBuffer(self):
handle = system.CreateSharedBuffer(DATA_SIZE)
self._testHandlesCreation(handle.Duplicate())
def testDuplicateSharedBufferWithNoneOptions(self):
handle = system.CreateSharedBuffer(DATA_SIZE)
self._testHandlesCreation(handle.Duplicate(None))
def testDuplicateSharedBufferWithDefaultOptions(self):
handle = system.CreateSharedBuffer(DATA_SIZE)
self._testHandlesCreation(
handle.Duplicate(system.DuplicateSharedBufferOptions()))
def testSendBytesOverSharedBuffer(self):
handle = system.CreateSharedBuffer(DATA_SIZE)
duplicated = handle.Duplicate()
data = _GetRandomBuffer(DATA_SIZE)
(res1, buf1) = handle.Map(0, DATA_SIZE)
(res2, buf2) = duplicated.Map(0, DATA_SIZE)
self.assertEquals(system.RESULT_OK, res1)
self.assertEquals(system.RESULT_OK, res2)
self.assertEquals(DATA_SIZE, len(buf1.buffer))
self.assertEquals(DATA_SIZE, len(buf2.buffer))
self.assertEquals(buf1.buffer, buf2.buffer)
buf1.buffer[:] = data
self.assertEquals(data, buf1.buffer)
self.assertEquals(data, buf2.buffer)
self.assertEquals(buf1.buffer, buf2.buffer)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(CoreTest)
test_results = unittest.TextTestRunner(verbosity=0).run(suite)
if not test_results.wasSuccessful():
sys.exit(1)
sys.exit(0)
| bsd-3-clause |
botswana-harvard/bhp065_project | bhp065/apps/hnscc_subject/admin/enrollment_admin.py | 1 | 1104 | from django.contrib import admin
from edc.base.modeladmin.admin import BaseModelAdmin
from ..forms import EnrollmentForm
from ..models import Enrollment
class EnrollmentAdmin(BaseModelAdmin):
dashboard_type = 'subject'
form = EnrollmentForm
def __init__(self, *args, **kwargs):
super(EnrollmentAdmin, self).__init__(*args, **kwargs)
self.fields = [
'report_datetime',
'pathology_no',
'gender',
'age',
'hiv_status',
'smoking_status',
"bpcc_enrolled",
"bid_number", ]
self.list_display = ("registered_subject", "pathology_no", "gender", "age",
"bpcc_enrolled", "bid_number")
self.radio_fields = {'gender': admin.VERTICAL,
'hiv_status': admin.VERTICAL,
'smoking_status': admin.VERTICAL,
"bpcc_enrolled": admin.VERTICAL, }
self.list_filter = ('gender', "bpcc_enrolled", "report_datetime")
admin.site.register(Enrollment, EnrollmentAdmin)
| gpl-2.0 |
iogf/vy | vyapp/plugins/ycmd/ycm_extra_conf.py | 6 | 8197 | # This file is NOT licensed under the GPLv3, which is the license for the rest
# of YouCompleteMe.
#
# Here's the license text for this file:
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org/>
from distutils.sysconfig import get_python_inc
import platform
import os.path as p
import subprocess
import ycm_core
DIR_OF_THIS_SCRIPT = p.abspath( p.dirname( __file__ ) )
DIR_OF_THIRD_PARTY = p.join( DIR_OF_THIS_SCRIPT, 'third_party' )
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
# These are the compilation flags that will be used in case there's no
# compilation database set (by default, one is not set).
# CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR.
flags = [
'-Wall',
'-Wextra',
'-Werror',
'-Wno-long-long',
'-Wno-variadic-macros',
'-fexceptions',
'-DNDEBUG',
# You 100% do NOT need -DUSE_CLANG_COMPLETER and/or -DYCM_EXPORT in your flags;
# only the YCM source code needs it.
'-DUSE_CLANG_COMPLETER',
'-DYCM_EXPORT=',
# THIS IS IMPORTANT! Without the '-x' flag, Clang won't know which language to
# use when compiling headers. So it will guess. Badly. So C++ headers will be
# compiled as C headers. You don't want that so ALWAYS specify the '-x' flag.
# For a C project, you would set this to 'c' instead of 'c++'.
'-x',
'c++',
'-isystem',
'cpp/pybind11',
'-isystem',
'cpp/whereami',
'-isystem',
'cpp/BoostParts',
'-isystem',
get_python_inc(),
'-isystem',
'cpp/llvm/include',
'-isystem',
'cpp/llvm/tools/clang/include',
'-I',
'cpp/ycm',
'-I',
'cpp/ycm/ClangCompleter',
'-isystem',
'cpp/ycm/tests/gmock/gtest',
'-isystem',
'cpp/ycm/tests/gmock/gtest/include',
'-isystem',
'cpp/ycm/tests/gmock',
'-isystem',
'cpp/ycm/tests/gmock/include',
'-isystem',
'cpp/ycm/benchmarks/benchmark/include',
]
# Clang automatically sets the '-std=' flag to 'c++14' for MSVC 2015 or later,
# which is required for compiling the standard library, and to 'c++11' for older
# versions.
if platform.system() != 'Windows':
flags.append( '-std=c++11' )
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# You can get CMake to generate this file for you by adding:
# set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )
# to your CMakeLists.txt file.
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
compilation_database_folder = ''
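# For example (illustrative only - the 'build' directory is an assumption,
# adjust it to wherever CMake wrote your compile_commands.json):
# compilation_database_folder = p.join( DIR_OF_THIS_SCRIPT, 'build' )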
if p.exists( compilation_database_folder ):
database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
database = None
def IsHeaderFile( filename ):
extension = p.splitext( filename )[ 1 ]
return extension in [ '.h', '.hxx', '.hpp', '.hh' ]
def FindCorrespondingSourceFile( filename ):
if IsHeaderFile( filename ):
basename = p.splitext( filename )[ 0 ]
for extension in SOURCE_EXTENSIONS:
replacement_file = basename + extension
if p.exists( replacement_file ):
return replacement_file
return filename
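# Illustration (assumes the files exist on disk): for 'widget.h' the function
# tries 'widget.cpp', 'widget.cxx', ... in SOURCE_EXTENSIONS order and returns
# the first sibling source that exists, falling back to 'widget.h' itself.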
def PathToPythonUsedDuringBuild():
try:
filepath = p.join( DIR_OF_THIS_SCRIPT, 'PYTHON_USED_DURING_BUILDING' )
with open( filepath ) as f:
return f.read().strip()
# We need to check for IOError for Python 2 and OSError for Python 3.
except ( IOError, OSError ):
return None
def Settings( **kwargs ):
language = kwargs[ 'language' ]
if language == 'cfamily':
# If the file is a header, try to find the corresponding source file and
# retrieve its flags from the compilation database if using one. This is
# necessary since compilation databases don't have entries for header files.
# In addition, use this source file as the translation unit. This makes it
# possible to jump from a declaration in the header file to its definition
# in the corresponding source file.
filename = FindCorrespondingSourceFile( kwargs[ 'filename' ] )
if not database:
return {
'flags': flags,
'include_paths_relative_to_dir': DIR_OF_THIS_SCRIPT,
'override_filename': filename
}
compilation_info = database.GetCompilationInfoForFile( filename )
if not compilation_info.compiler_flags_:
return {}
# Bear in mind that compilation_info.compiler_flags_ does NOT return a
# python list, but a "list-like" StringVec object.
final_flags = list( compilation_info.compiler_flags_ )
# NOTE: This is just for YouCompleteMe; it's highly likely that your project
# does NOT need to remove the stdlib flag. DO NOT USE THIS IN YOUR
# ycm_extra_conf IF YOU'RE NOT 100% SURE YOU NEED IT.
try:
final_flags.remove( '-stdlib=libc++' )
except ValueError:
pass
return {
'flags': final_flags,
'include_paths_relative_to_dir': compilation_info.compiler_working_dir_,
'override_filename': filename
}
if language == 'python':
return {
'interpreter_path': PathToPythonUsedDuringBuild()
}
return {}
def GetStandardLibraryIndexInSysPath( sys_path ):
for index, path in enumerate( sys_path ):
if p.isfile( p.join( path, 'os.py' ) ):
return index
raise RuntimeError( 'Could not find standard library path in Python path.' )
def PythonSysPath( **kwargs ):
sys_path = kwargs[ 'sys_path' ]
interpreter_path = kwargs[ 'interpreter_path' ]
major_version = subprocess.check_output( [
interpreter_path, '-c', 'import sys; print( sys.version_info[ 0 ] )' ]
).rstrip().decode( 'utf8' )
sys_path.insert( GetStandardLibraryIndexInSysPath( sys_path ) + 1,
p.join( DIR_OF_THIRD_PARTY, 'python-future', 'src' ) )
sys_path[ 0:0 ] = [ p.join( DIR_OF_THIS_SCRIPT ),
p.join( DIR_OF_THIRD_PARTY, 'bottle' ),
p.join( DIR_OF_THIRD_PARTY, 'cregex',
'regex_{}'.format( major_version ) ),
p.join( DIR_OF_THIRD_PARTY, 'frozendict' ),
p.join( DIR_OF_THIRD_PARTY, 'jedi_deps', 'jedi' ),
p.join( DIR_OF_THIRD_PARTY, 'jedi_deps', 'numpydoc' ),
p.join( DIR_OF_THIRD_PARTY, 'jedi_deps', 'parso' ),
p.join( DIR_OF_THIRD_PARTY, 'requests_deps', 'requests' ),
p.join( DIR_OF_THIRD_PARTY, 'requests_deps',
'urllib3',
'src' ),
p.join( DIR_OF_THIRD_PARTY, 'requests_deps',
'chardet' ),
p.join( DIR_OF_THIRD_PARTY, 'requests_deps',
'certifi' ),
p.join( DIR_OF_THIRD_PARTY, 'requests_deps',
'idna' ),
p.join( DIR_OF_THIRD_PARTY, 'waitress' ) ]
return sys_path
| mit |
Algomorph/gpxanalyzer | gpxanalyzer/filters/color_structure_pythonic.py | 1 | 10874 | '''
Created on Apr 25, 2014
@author: Gregory Kramida
@copyright: (c) Gregory Kramida 2014
@license: GNU v3
'''
import gpxanalyzer.gpxanalyzer_internals as gi
import numpy as np
import sys
import math
import timeit
amplitude_thresholds = np.array([0.0, 0.000000000001, 0.037, 0.08, 0.195, 0.32],dtype=np.float64)
n_amplitude_levels = np.array([1, 25, 20, 35, 35, 140]);
difference_thresholds = np.array([
[0, 6, 60, 110, 256, -1],
[0, 6, 20, 60, 110, 256],
[0, 6, 20, 60, 110, 256],
[0, 6, 20, 60, 110, 256]], dtype=np.int16)
n_hue_levels = np.array([
[1, 4, 4, 4, 0],
[1, 4, 4, 8, 8],
[1, 4, 8, 8, 8],
[1, 4, 16, 16, 16]], dtype=np.uint8)
n_sum_levels = np.array([
[8, 4, 1, 1, 0],
[8, 4, 4, 2, 1],
[16, 4, 4, 4, 4],
[32, 8, 4, 4, 4]], dtype=np.uint8)
n_cum_levels = np.array([
[24, 8, 4, 0, 0],
[56, 40, 24, 8, 0],
[112, 96, 64, 32, 0],
[224, 192, 128, 64, 0]], dtype=np.uint8)
def reshape_bitstrings(bts):
return bts.transpose((1,0,2)).reshape((bts.shape[1],bts.shape[1],bts.shape[2]*2))
def convert_RGB2HMMD(raster):
out = np.zeros(raster.shape, dtype = np.int16)
for y in xrange(raster.shape[0]):
for x in xrange(raster.shape[1]):
(R,G,B) = raster[y,x,:].astype(np.int32)
mx=R
if(mx<G): mx=G
if(mx<B): mx=B
mn=R
if(mn>G): mn=G
if(mn>B): mn=B
            if (mx == mn):  # (R == G == B) - exactly gray
                hue = -1  # hue is undefined
else:
#solve Hue
if(R==mx):
hue=float(G-B)* 60.0/(mx-mn)
elif(G==mx):
hue=120.0+float(B-R)* 60.0/(mx-mn)
elif(B==mx):
hue=240.0+float(R-G)* 60.0/(mx-mn)
if(hue<0.0): hue+=360.0
H = int(hue + 0.5) #range [0,360]
S = int((mx + mn)/2.0 + 0.5) #range [0,255]
D = mx - mn #range [0,255]
out[y,x,:] = (H,S,D)
return out
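# Worked example: RGB (200, 100, 50) gives mx=200, mn=50; R is the max, so
# hue = (100-50)*60.0/150 = 20.0, hence H=20, S=int(125.5)=125, D=150.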
def to_bitstring(arr):
bts = np.zeros((8),np.uint32)
for bn in arr:
idxUint = bn >> 5
idxBit = bn - (idxUint << 5)
bts[idxUint] |= (1 << idxBit)
return bts
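# Illustration: to_bitstring([37]) computes idxUint = 37 >> 5 = 1 and
# idxBit = 37 - 32 = 5, so bts[1] == 1 << 5 == 32 and every other word is
# zero - a 256-bit membership set packed into eight uint32 words.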
def extract_row_bitstrings(quant_cell):
bitstrings = np.zeros((quant_cell.shape[0]*2,quant_cell.shape[1],4),dtype=np.uint32)
for ix_row in xrange(0,quant_cell.shape[0]):
row = quant_cell[ix_row]
for ix_bt in xrange(0, quant_cell.shape[1]-7):
bt = to_bitstring(row[ix_bt:ix_bt+8])
ix_ins = ix_bt<<1
bitstrings[ix_ins,ix_row] = bt[0:4]
bitstrings[ix_ins+1,ix_row] = bt[4:8]
return bitstrings
def check_row_bitstrings(quant_cell,row_bitstrings, raise_exception = False):
rb = row_bitstrings
for ix_row in xrange(0,row_bitstrings.shape[0]- gi.WINDOW_SIZE*2 + 2,2):
for ix_col in xrange(0,row_bitstrings.shape[1]):
bitstring = rb[ix_row:ix_row+2,ix_col]
x = ix_row / 2
y = ix_col
sample = quant_cell[y,x:x+gi.WINDOW_SIZE].copy()
vals = bitstring_vals(bitstring)
sample = np.unique(sample)
sample.sort()
vals.sort()
if(not np.array_equal(sample, vals)):
if(raise_exception):
raise ValueError("Row bitstring failure at x,y: {0:d},{1:d}".format(x,y))
else:
return False
return True
def agg_bitstrings(bitstring_arr):
if(len(bitstring_arr.shape) > 2):
bitstring_arr = bitstring_arr.transpose(1,0,2).reshape((8,-1))
agg = np.array([0,0,0,0,0,0,0,0],dtype=np.uint32)
for bitstring in bitstring_arr:
agg |= bitstring
return agg
def extract_window_bitstrings(row_bitstrings):
bitstrings = np.zeros_like(row_bitstrings)
for ix_row in xrange(0,row_bitstrings.shape[0],2):
for ix_col in xrange(0,row_bitstrings.shape[1]-7):
chunk = row_bitstrings[ix_row:ix_row+2,ix_col:ix_col+8]
bitstring = agg_bitstrings(chunk)
bitstrings[ix_row,ix_col] = bitstring[0:4]
bitstrings[ix_row+1,ix_col] = bitstring[4:8]
return bitstrings
def check_window_bitstrings(quant_cell,window_bitstrings, raise_exception = False):
wb = window_bitstrings
for ix_row in xrange(0,window_bitstrings.shape[0]-gi.WINDOW_SIZE+1):
for ix_col in xrange(0,window_bitstrings.shape[1]-gi.WINDOW_SIZE+1):
bitstring = wb[ix_row,ix_col]
y = ix_row
x = ix_col
sample = np.unique(quant_cell[y:y+gi.WINDOW_SIZE,x:x+gi.WINDOW_SIZE])
vals = bitstring_vals(bitstring)
if(not np.array_equal(sample, vals)):
if(raise_exception):
raise ValueError("Window bitstring failure at x,y: {0:d},{1:d}".format(x,y))
else:
return False
return True
def extract_histogram(quant_cell, x, y, verbose = False, first_n_cols = None, first_n_rows = None):
region = quant_cell[x:x+gi.REGION_SIZE,y:y+gi.REGION_SIZE]
descr = np.zeros((gi.BASE_QUANT_SPACE,),dtype=np.uint16)
stop_at_col = gi.REGION_CLIP
    if first_n_cols is not None:
        stop_at_col = first_n_cols
    stop_at_row = gi.REGION_CLIP
    if first_n_rows is not None:
stop_at_row = first_n_rows
for ix_col in xrange(0,stop_at_col):
hist = np.zeros((gi.BASE_QUANT_SPACE,),dtype=np.int32)
for ix_row in xrange(0,gi.WINDOW_SIZE):
for ix_wincol in xrange(ix_col,ix_col + gi.WINDOW_SIZE):
hist[region[ix_row,ix_wincol]]+=1
for ix in xrange(0,gi.BASE_QUANT_SPACE):
descr[ix] += int(hist[ix] > 0)
for ix_row in xrange(gi.WINDOW_SIZE,stop_at_row + gi.WINDOW_SIZE-1):
ix_row_sub = ix_row - gi.WINDOW_SIZE
for ix_wincol in xrange(ix_col,ix_col + gi.WINDOW_SIZE):
hist[region[ix_row_sub,ix_wincol]]-=1
hist[region[ix_row,ix_wincol]]+=1
for ix in xrange(0,gi.BASE_QUANT_SPACE):
descr[ix] += int(hist[ix] > 0)
if(verbose):
print "Finished column {0:d} out of {1:d}".format(ix_col+1, stop_at_col),
sys.stdout.flush()
print "\r",
return descr
def quantize_amplitude(descriptor):
des = descriptor
norm = gi.REGION_NORM
n_total_levels = n_amplitude_levels.sum()
des_out = np.zeros(des.shape,dtype=np.uint8)
for i_bin in xrange(0,des.size):
val = float(des[i_bin]) / norm
quant_val = 0
i_quant = 0
while (i_quant+1 < amplitude_thresholds.size and val >= amplitude_thresholds[i_quant+1]):
quant_val += n_amplitude_levels[i_quant]
i_quant+=1
next_thresh = amplitude_thresholds[i_quant+1] if i_quant+1 < n_amplitude_levels.size else 1.0
val = int(quant_val +
(val - amplitude_thresholds[i_quant]) *
(n_amplitude_levels[i_quant] / (next_thresh - amplitude_thresholds[i_quant])))
if(val == n_total_levels):
val = n_total_levels - 1
des_out[i_bin] = val
return des_out
def bitstrings_to_histogram(window_bitstrings,x,y, verbose = False):
chunk = window_bitstrings[y:y+gi.REGION_CLIP,x:x+gi.REGION_CLIP];
descr = np.zeros((gi.BASE_QUANT_SPACE),dtype=np.uint16)
i_row = 0
for row in chunk:
for bitstring in row:
vals = bitstring_vals(bitstring)
for val in vals:
descr[val]+=1
if verbose:
i_row+=1
print "Finished column {0:d} out of {1:d}".format(i_row, gi.REGION_CLIP),
sys.stdout.flush()
print "\r",
return descr
def bitstring_vals(bitstring_arr):
if(len(bitstring_arr.shape) > 1):
bitstring_arr = bitstring_arr.flatten()
vals = []
for ix_uint in range(0,8):
uint = bitstring_arr[ix_uint]
addend = (ix_uint << 5)
for bit_ix in range(0,32):
if(uint & (1 << bit_ix)):
vals.append(addend + bit_ix)
return np.uint8(vals)
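# Illustration: bitstring_vals() inverts to_bitstring(); decoding the array
# [0, 32, 0, 0, 0, 0, 0, 0] recovers the single quantized value 37.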
def tune_group_size(stmt):
for x in xrange(0,9):
y = 0
while((2**x)*(2**y) <= 512):
size_x = 2**x
size_y = 2**y
stmt = "evt = extr.program.zeroOutImage(mgr.queue,extr.bitstring_buffer.shape,({0:d},{1:d})"+\
",extr.bitstring_buffer); cl.wait_for_events([evt])".format(size_x,size_y)
timeit.timeit(stmt)
y+=1
def hist_bin(raster):
log_area = math.log(float(raster.size),2)
scale_power = max(int(0.5 * log_area - 8 + 0.5),0)
subsample = 1 << scale_power
window_width = 8 * subsample
window_height = 8 * subsample
mod_width = raster.shape[1] - (window_width - 1)
mod_height = raster.shape[0] - (window_height - 1)
hist = np.zeros((256),dtype=np.uint64)
descr = np.zeros((256),dtype=np.uint64)
for col in xrange(0,mod_width,subsample):
hist[:] = 0
stop_at = col + window_width
for row in xrange(0,window_height,subsample):
for loc in xrange(col,stop_at,subsample):
val = raster[row,loc]
hist[val]+=1
for ix in xrange(0,len(hist)):
if(hist[ix]):
descr[ix] +=1
for row in xrange(subsample,mod_height,subsample):
del_row = row - subsample
add_row = row + window_height - subsample
for loc in xrange(col,stop_at,subsample):
del_val = raster[del_row,loc]
add_val = raster[add_row,loc]
hist[del_val]-=1
hist[add_val]+=1
for ix in xrange(0,len(hist)):
if(hist[ix]):
descr[ix] +=1
return descr
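# Worked example of the scaling above: a 640x480 raster has area 307200 and
# log2(307200) ~= 18.23, so scale_power = max(int(0.5*18.23 - 8 + 0.5), 0) = 1,
# giving subsample = 2 and a 16x16 window slid in steps of 2 pixels.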
def quantize_HMMD(raster):
out = np.zeros((raster.shape[0],raster.shape[1]), dtype = np.uint8)
N = 3
for y in xrange(raster.shape[0]):
for x in xrange(raster.shape[1]):
(H,S,D) = raster[y,x]
iSub = 0
while(difference_thresholds[N,iSub + 1] <= D):
iSub +=1
Hindex = int((H / 360.0) * n_hue_levels[N,iSub]);
if (H == 360):
Hindex = 0
Sindex = int(math.floor((S - 0.5*difference_thresholds[N,iSub])
* n_sum_levels[N,iSub]
/ (255 - difference_thresholds[N,iSub])))
if Sindex >= n_sum_levels[N,iSub]:
Sindex = n_sum_levels[N,iSub] - 1
px = n_cum_levels[N,iSub] + Hindex*n_sum_levels[N,iSub] + Sindex
out[y,x] = px
return out | gpl-3.0 |
inwotep/lava-android-test | lava_android_test/test_definitions/install_prep_4bench.py | 2 | 1731 | # Copyright (c) 2012 Linaro
# Author: Linaro Validation Team <linaro-dev@lists.linaro.org>
#
# This file is part of LAVA Android Test.
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Remove the Linaro wallpaper before starting the benchmark test
**URL:** None
**Default options:** None
"""
import lava_android_test.config
import lava_android_test.testdef
test_name = 'install_prep_4bench'
INSTALL_STEPS_ADB_PRE = []
ADB_SHELL_STEPS = ['rm /data/system/wallpaper_info.xml',
"echo install_prep_4bench.wallpaper: PASS"]
PATTERN = r"^\s*(?P<test_case_id>[^:]+?):\s+(?P<result>(PASS|FAIL)?)\s*$"
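# Example: the echo step above prints "install_prep_4bench.wallpaper: PASS",
# which PATTERN parses as test_case_id='install_prep_4bench.wallpaper' and
# result='PASS'.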
inst = lava_android_test.testdef.AndroidTestInstaller(
steps_adb_pre=INSTALL_STEPS_ADB_PRE)
run = lava_android_test.testdef.AndroidTestRunner(
adbshell_steps=ADB_SHELL_STEPS)
parser = lava_android_test.testdef.AndroidTestParser(PATTERN)
testobj = lava_android_test.testdef.AndroidTest(testname=test_name,
installer=inst,
runner=run,
parser=parser)
| gpl-3.0 |
ravenland/ycmWinRepo | python/ycm/client/command_request.py | 10 | 3213 | #!/usr/bin/env python
#
# Copyright (C) 2013 Google Inc.
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import vim
from ycm.client.base_request import BaseRequest, BuildRequestData, ServerError
from ycm import vimsupport
from ycmd.utils import ToUtf8IfNeeded
def _EnsureBackwardsCompatibility( arguments ):
if arguments and arguments[ 0 ] == 'GoToDefinitionElseDeclaration':
arguments[ 0 ] = 'GoTo'
return arguments
class CommandRequest( BaseRequest ):
def __init__( self, arguments, completer_target = None ):
super( CommandRequest, self ).__init__()
self._arguments = _EnsureBackwardsCompatibility( arguments )
self._completer_target = ( completer_target if completer_target
else 'filetype_default' )
self._is_goto_command = (
self._arguments and self._arguments[ 0 ].startswith( 'GoTo' ) )
self._response = None
def Start( self ):
request_data = BuildRequestData()
request_data.update( {
'completer_target': self._completer_target,
'command_arguments': self._arguments
} )
try:
self._response = self.PostDataToHandler( request_data,
'run_completer_command' )
except ServerError as e:
vimsupport.PostVimMessage( e )
def Response( self ):
return self._response
def RunPostCommandActionsIfNeeded( self ):
if not self._is_goto_command or not self.Done() or not self._response:
return
if isinstance( self._response, list ):
defs = [ _BuildQfListItem( x ) for x in self._response ]
vim.eval( 'setqflist( %s )' % repr( defs ) )
vim.eval( 'youcompleteme#OpenGoToList()' )
else:
vimsupport.JumpToLocation( self._response[ 'filepath' ],
self._response[ 'line_num' ],
self._response[ 'column_num' ] )
def SendCommandRequest( arguments, completer ):
request = CommandRequest( arguments, completer )
# This is a blocking call.
request.Start()
request.RunPostCommandActionsIfNeeded()
return request.Response()
def _BuildQfListItem( goto_data_item ):
qf_item = {}
if 'filepath' in goto_data_item:
qf_item[ 'filename' ] = ToUtf8IfNeeded( goto_data_item[ 'filepath' ] )
if 'description' in goto_data_item:
qf_item[ 'text' ] = ToUtf8IfNeeded( goto_data_item[ 'description' ] )
if 'line_num' in goto_data_item:
qf_item[ 'lnum' ] = goto_data_item[ 'line_num' ]
if 'column_num' in goto_data_item:
qf_item[ 'col' ] = goto_data_item[ 'column_num' ] - 1
return qf_item
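# Illustration: given { 'filepath': '/tmp/a.py', 'line_num': 3,
# 'column_num': 5, 'description': 'def Foo' }, _BuildQfListItem returns
# { 'filename': '/tmp/a.py', 'lnum': 3, 'col': 4, 'text': 'def Foo' }
# (the 1-based column_num is decremented by one for the quickfix list).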
| gpl-3.0 |
dsb9938/Rezound-ICS-Kernel-Old | Documentation/networking/cxacru-cf.py | 14668 | 1626 | #!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
import sys
import struct
i = 0
while True:
buf = sys.stdin.read(4)
if len(buf) == 0:
break
elif len(buf) != 4:
sys.stdout.write("\n")
sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(buf)))
sys.exit(1)
if i > 0:
sys.stdout.write(" ")
sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", buf)[0]))
i += 1
sys.stdout.write("\n")
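# Illustration: an input of eight bytes 01 00 00 00 0a 00 00 00 (the
# little-endian values 1 and 10) produces the output line "0=1 1=10".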
| gpl-2.0 |
bennojoy/ansible | test/units/errors/test_errors.py | 170 | 3108 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject
from ansible.errors import AnsibleError
from ansible.compat.tests import BUILTINS
from ansible.compat.tests.mock import mock_open, patch
class TestErrors(unittest.TestCase):
def setUp(self):
self.message = 'This is the error message'
self.obj = AnsibleBaseYAMLObject()
def tearDown(self):
pass
def test_basic_error(self):
e = AnsibleError(self.message)
self.assertEqual(e.message, 'ERROR! ' + self.message)
self.assertEqual(e.__repr__(), 'ERROR! ' + self.message)
@patch.object(AnsibleError, '_get_error_lines_from_file')
def test_error_with_object(self, mock_method):
self.obj.ansible_pos = ('foo.yml', 1, 1)
mock_method.return_value = ('this is line 1\n', '')
e = AnsibleError(self.message, self.obj)
self.assertEqual(e.message, "ERROR! This is the error message\n\nThe error appears to have been in 'foo.yml': line 1, column 1, but may\nbe elsewhere in the file depending on the exact syntax problem.\n\nThe offending line appears to be:\n\n\nthis is line 1\n^ here\n")
def test_get_error_lines_from_file(self):
m = mock_open()
m.return_value.readlines.return_value = ['this is line 1\n']
with patch('{0}.open'.format(BUILTINS), m):
# this line will be found in the file
self.obj.ansible_pos = ('foo.yml', 1, 1)
e = AnsibleError(self.message, self.obj)
self.assertEqual(e.message, "ERROR! This is the error message\n\nThe error appears to have been in 'foo.yml': line 1, column 1, but may\nbe elsewhere in the file depending on the exact syntax problem.\n\nThe offending line appears to be:\n\n\nthis is line 1\n^ here\n")
# this line will not be found, as it is out of the index range
self.obj.ansible_pos = ('foo.yml', 2, 1)
e = AnsibleError(self.message, self.obj)
self.assertEqual(e.message, "ERROR! This is the error message\n\nThe error appears to have been in 'foo.yml': line 2, column 1, but may\nbe elsewhere in the file depending on the exact syntax problem.\n\n(specified line no longer in file, maybe it changed?)")
| gpl-3.0 |
bpsinc-native/src_third_party_scons-2.0.1 | engine/SCons/Scanner/Fortran.py | 61 | 14347 | """SCons.Scanner.Fortran
This module implements the dependency scanner for Fortran code.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Scanner/Fortran.py 5134 2010/08/16 23:02:40 bdeegan"
import re
import SCons.Node
import SCons.Node.FS
import SCons.Scanner
import SCons.Util
import SCons.Warnings
class F90Scanner(SCons.Scanner.Classic):
"""
A Classic Scanner subclass for Fortran source files which takes
into account both USE and INCLUDE statements. This scanner will
work for both F77 and F90 (and beyond) compilers.
Currently, this scanner assumes that the include files do not contain
USE statements. To enable the ability to deal with USE statements
in include files, add logic right after the module names are found
to loop over each include file, search for and locate each USE
statement, and append each module name to the list of dependencies.
Caching the search results in a common dictionary somewhere so that
the same include file is not searched multiple times would be a
smart thing to do.
"""
def __init__(self, name, suffixes, path_variable,
use_regex, incl_regex, def_regex, *args, **kw):
self.cre_use = re.compile(use_regex, re.M)
self.cre_incl = re.compile(incl_regex, re.M)
self.cre_def = re.compile(def_regex, re.M)
def _scan(node, env, path, self=self):
node = node.rfile()
if not node.exists():
return []
return self.scan(node, env, path)
kw['function'] = _scan
kw['path_function'] = SCons.Scanner.FindPathDirs(path_variable)
kw['recursive'] = 1
kw['skeys'] = suffixes
kw['name'] = name
SCons.Scanner.Current.__init__(self, *args, **kw)
def scan(self, node, env, path=()):
# cache the includes list in node so we only scan it once:
        if node.includes is not None:
mods_and_includes = node.includes
else:
# retrieve all included filenames
includes = self.cre_incl.findall(node.get_text_contents())
# retrieve all USE'd module names
modules = self.cre_use.findall(node.get_text_contents())
# retrieve all defined module names
defmodules = self.cre_def.findall(node.get_text_contents())
# Remove all USE'd module names that are defined in the same file
# (case-insensitively)
d = {}
for m in defmodules:
d[m.lower()] = 1
modules = [m for m in modules if m.lower() not in d]
# Convert module name to a .mod filename
suffix = env.subst('$FORTRANMODSUFFIX')
modules = [x.lower() + suffix for x in modules]
# Remove unique items from the list
mods_and_includes = SCons.Util.unique(includes+modules)
node.includes = mods_and_includes
# This is a hand-coded DSU (decorate-sort-undecorate, or
# Schwartzian transform) pattern. The sort key is the raw name
# of the file as specifed on the USE or INCLUDE line, which lets
# us keep the sort order constant regardless of whether the file
# is actually found in a Repository or locally.
nodes = []
source_dir = node.get_dir()
if callable(path):
path = path()
for dep in mods_and_includes:
n, i = self.find_include(dep, source_dir, path)
if n is None:
SCons.Warnings.warn(SCons.Warnings.DependencyWarning,
"No dependency generated for file: %s (referenced by: %s) -- file not found" % (i, node))
else:
sortkey = self.sort_key(dep)
nodes.append((sortkey, n))
return [pair[1] for pair in sorted(nodes)]
def FortranScan(path_variable="FORTRANPATH"):
"""Return a prototype Scanner instance for scanning source files
for Fortran USE & INCLUDE statements"""
# The USE statement regex matches the following:
#
# USE module_name
# USE :: module_name
# USE, INTRINSIC :: module_name
# USE, NON_INTRINSIC :: module_name
#
# Limitations
#
# -- While the regex can handle multiple USE statements on one line,
# it cannot properly handle them if they are commented out.
# In either of the following cases:
#
# ! USE mod_a ; USE mod_b [entire line is commented out]
# USE mod_a ! ; USE mod_b [in-line comment of second USE statement]
#
# the second module name (mod_b) will be picked up as a dependency
# even though it should be ignored. The only way I can see
# to rectify this would be to modify the scanner to eliminate
# the call to re.findall, read in the contents of the file,
# treating the comment character as an end-of-line character
# in addition to the normal linefeed, loop over each line,
# weeding out the comments, and looking for the USE statements.
# One advantage to this is that the regex passed to the scanner
# would no longer need to match a semicolon.
#
# -- I question whether or not we need to detect dependencies to
# INTRINSIC modules because these are built-in to the compiler.
# If we consider them a dependency, will SCons look for them, not
# find them, and kill the build? Or will we there be standard
# compiler-specific directories we will need to point to so the
# compiler and SCons can locate the proper object and mod files?
# Here is a breakdown of the regex:
#
# (?i) : regex is case insensitive
# ^ : start of line
# (?: : group a collection of regex symbols without saving the match as a "group"
# ^|; : matches either the start of the line or a semicolon - semicolon
# ) : end the unsaved grouping
# \s* : any amount of white space
# USE : match the string USE, case insensitive
# (?: : group a collection of regex symbols without saving the match as a "group"
# \s+| : match one or more whitespace OR .... (the next entire grouped set of regex symbols)
# (?: : group a collection of regex symbols without saving the match as a "group"
# (?: : establish another unsaved grouping of regex symbols
# \s* : any amount of white space
# , : match a comma
# \s* : any amount of white space
# (?:NON_)? : optionally match the prefix NON_, case insensitive
# INTRINSIC : match the string INTRINSIC, case insensitive
# )? : optionally match the ", INTRINSIC/NON_INTRINSIC" grouped expression
# \s* : any amount of white space
# :: : match a double colon that must appear after the INTRINSIC/NON_INTRINSIC attribute
# ) : end the unsaved grouping
# ) : end the unsaved grouping
# \s* : match any amount of white space
# (\w+) : match the module name that is being USE'd
#
#
    use_regex = r"(?i)(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"
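    # Illustration (the scanner compiles this pattern with re.M):
    #   re.findall(use_regex, "USE mod_a\n USE, INTRINSIC :: iso_fortran_env", re.M)
    # returns ['mod_a', 'iso_fortran_env'].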
# The INCLUDE statement regex matches the following:
#
# INCLUDE 'some_Text'
# INCLUDE "some_Text"
# INCLUDE "some_Text" ; INCLUDE "some_Text"
# INCLUDE kind_"some_Text"
# INCLUDE kind_'some_Text"
#
# where some_Text can include any alphanumeric and/or special character
# as defined by the Fortran 2003 standard.
#
# Limitations:
#
# -- The Fortran standard dictates that a " or ' in the INCLUDE'd
# string must be represented as a "" or '', if the quotes that wrap
# the entire string are either a ' or ", respectively. While the
# regular expression below can detect the ' or " characters just fine,
# the scanning logic, presently is unable to detect them and reduce
# them to a single instance. This probably isn't an issue since,
# in practice, ' or " are not generally used in filenames.
#
# -- This regex will not properly deal with multiple INCLUDE statements
# when the entire line has been commented out, ala
#
# ! INCLUDE 'some_file' ; INCLUDE 'some_file'
#
# In such cases, it will properly ignore the first INCLUDE file,
# but will actually still pick up the second. Interestingly enough,
# the regex will properly deal with these cases:
#
# INCLUDE 'some_file'
# INCLUDE 'some_file' !; INCLUDE 'some_file'
#
# To get around the above limitation, the FORTRAN programmer could
# simply comment each INCLUDE statement separately, like this
#
# ! INCLUDE 'some_file' !; INCLUDE 'some_file'
#
# The way I see it, the only way to get around this limitation would
# be to modify the scanning logic to replace the calls to re.findall
# with a custom loop that processes each line separately, throwing
# away fully commented out lines before attempting to match against
# the INCLUDE syntax.
#
# Here is a breakdown of the regex:
#
# (?i) : regex is case insensitive
# (?: : begin a non-saving group that matches the following:
# ^ : either the start of the line
# | : or
# ['">]\s*; : a semicolon that follows a single quote,
# double quote or greater than symbol (with any
# amount of whitespace in between). This will
# allow the regex to match multiple INCLUDE
# statements per line (although it also requires
# the positive lookahead assertion that is
# used below). It will even properly deal with
# (i.e. ignore) cases in which the additional
# INCLUDES are part of an in-line comment, ala
# " INCLUDE 'someFile' ! ; INCLUDE 'someFile2' "
# ) : end of non-saving group
# \s* : any amount of white space
# INCLUDE : match the string INCLUDE, case insensitive
# \s+ : match one or more white space characters
# (?\w+_)? : match the optional "kind-param _" prefix allowed by the standard
# [<"'] : match the include delimiter - an apostrophe, double quote, or less than symbol
# (.+?) : match one or more characters that make up
# the included path and file name and save it
# in a group. The Fortran standard allows for
# any non-control character to be used. The dot
# operator will pick up any character, including
# control codes, but I can't conceive of anyone
# putting control codes in their file names.
# The question mark indicates it is non-greedy so
# that regex will match only up to the next quote,
# double quote, or greater than symbol
# (?=["'>]) : positive lookahead assertion to match the include
# delimiter - an apostrophe, double quote, or
# greater than symbol. This level of complexity
# is required so that the include delimiter is
# not consumed by the match, thus allowing the
# sub-regex discussed above to uniquely match a
# set of semicolon-separated INCLUDE statements
# (as allowed by the F2003 standard)
    include_regex = r"""(?i)(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
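    # Illustration:
    #   re.findall(include_regex, "      INCLUDE 'params.inc'", re.M)
    # returns ['params.inc'].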
# The MODULE statement regex finds module definitions by matching
# the following:
#
# MODULE module_name
#
# but *not* the following:
#
# MODULE PROCEDURE procedure_name
#
# Here is a breakdown of the regex:
#
# (?i) : regex is case insensitive
# ^\s* : any amount of white space
# MODULE : match the string MODULE, case insensitive
# \s+ : match one or more white space characters
# (?!PROCEDURE) : but *don't* match if the next word matches
# PROCEDURE (negative lookahead assertion),
# case insensitive
# (\w+) : match one or more alphanumeric characters
# that make up the defined module name and
# save it in a group
    def_regex = r"""(?i)^\s*MODULE\s+(?!PROCEDURE)(\w+)"""
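    # Illustration:
    #   re.findall(def_regex, "module geometry\nmodule procedure area", re.M)
    # returns ['geometry'] - the MODULE PROCEDURE line is correctly skipped.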
scanner = F90Scanner("FortranScan",
"$FORTRANSUFFIXES",
path_variable,
use_regex,
include_regex,
def_regex)
return scanner
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit |
xcyan/models | domain_adaptation/pixel_domain_adaptation/baselines/baseline_train.py | 10 | 5624 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Trains the classification/pose baselines."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from functools import partial
# Dependency imports
import tensorflow as tf
from domain_adaptation.datasets import dataset_factory
from domain_adaptation.pixel_domain_adaptation import pixelda_preprocess
from domain_adaptation.pixel_domain_adaptation import pixelda_task_towers
flags = tf.app.flags
FLAGS = flags.FLAGS
slim = tf.contrib.slim
flags.DEFINE_string('master', '', 'BNS name of the tensorflow server')
flags.DEFINE_integer('task', 0, 'The task ID.')
flags.DEFINE_integer('num_ps_tasks', 0,
'The number of parameter servers. If the value is 0, then '
'the parameters are handled locally by the worker.')
flags.DEFINE_integer('batch_size', 32, 'The number of samples per batch.')
flags.DEFINE_string('dataset_name', None, 'The name of the dataset.')
flags.DEFINE_string('dataset_dir', None,
'The directory where the data is stored.')
flags.DEFINE_string('split_name', None, 'The name of the train/test split.')
flags.DEFINE_float('learning_rate', 0.001, 'The initial learning rate.')
flags.DEFINE_integer(
'learning_rate_decay_steps', 20000,
'The frequency, in steps, at which the learning rate is decayed.')
flags.DEFINE_float('learning_rate_decay_factor',
0.95,
'The factor with which the learning rate is decayed.')
flags.DEFINE_float('adam_beta1', 0.5, 'The beta1 value for the AdamOptimizer')
flags.DEFINE_float('weight_decay', 1e-5,
'The L2 coefficient on the model weights.')
flags.DEFINE_string(
'logdir', None, 'The location of the logs and checkpoints.')
flags.DEFINE_integer('save_interval_secs', 600,
'How often, in seconds, we save the model to disk.')
flags.DEFINE_integer('save_summaries_secs', 600,
'How often, in seconds, we compute the summaries.')
flags.DEFINE_integer(
'num_readers', 4,
'The number of parallel readers that read data from the dataset.')
flags.DEFINE_float(
'moving_average_decay', 0.9999,
'The amount of decay to use for moving averages.')
def main(unused_argv):
tf.logging.set_verbosity(tf.logging.INFO)
hparams = tf.contrib.training.HParams()
hparams.weight_decay_task_classifier = FLAGS.weight_decay
if FLAGS.dataset_name in ['mnist', 'mnist_m', 'usps']:
hparams.task_tower = 'mnist'
else:
raise ValueError('Unknown dataset %s' % FLAGS.dataset_name)
with tf.Graph().as_default():
with tf.device(
tf.train.replica_device_setter(FLAGS.num_ps_tasks, merge_devices=True)):
dataset = dataset_factory.get_dataset(FLAGS.dataset_name,
FLAGS.split_name, FLAGS.dataset_dir)
num_classes = dataset.num_classes
preprocess_fn = partial(pixelda_preprocess.preprocess_classification,
is_training=True)
images, labels = dataset_factory.provide_batch(
FLAGS.dataset_name,
FLAGS.split_name,
dataset_dir=FLAGS.dataset_dir,
num_readers=FLAGS.num_readers,
batch_size=FLAGS.batch_size,
num_preprocessing_threads=FLAGS.num_readers)
# preprocess_fn=preprocess_fn)
# Define the model
logits, _ = pixelda_task_towers.add_task_specific_model(
images, hparams, num_classes=num_classes, is_training=True)
# Define the losses
if 'classes' in labels:
one_hot_labels = labels['classes']
loss = tf.losses.softmax_cross_entropy(
onehot_labels=one_hot_labels, logits=logits)
tf.summary.scalar('losses/Classification_Loss', loss)
else:
raise ValueError('Only support classification for now.')
total_loss = tf.losses.get_total_loss()
tf.summary.scalar('losses/Total_Loss', total_loss)
# Setup the moving averages
moving_average_variables = slim.get_model_variables()
variable_averages = tf.train.ExponentialMovingAverage(
FLAGS.moving_average_decay, slim.get_or_create_global_step())
tf.add_to_collection(
tf.GraphKeys.UPDATE_OPS,
variable_averages.apply(moving_average_variables))
# Specify the optimization scheme:
learning_rate = tf.train.exponential_decay(
FLAGS.learning_rate,
slim.get_or_create_global_step(),
FLAGS.learning_rate_decay_steps,
FLAGS.learning_rate_decay_factor,
staircase=True)
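      # Illustration: with the flag defaults above (0.001, 20000 steps, 0.95,
      # staircase=True) the rate is 0.001 for steps 0-19999, 0.00095 for
      # steps 20000-39999, ~0.0009025 afterwards, and so on.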
optimizer = tf.train.AdamOptimizer(learning_rate, beta1=FLAGS.adam_beta1)
train_op = slim.learning.create_train_op(total_loss, optimizer)
slim.learning.train(
train_op,
FLAGS.logdir,
master=FLAGS.master,
is_chief=(FLAGS.task == 0),
save_summaries_secs=FLAGS.save_summaries_secs,
save_interval_secs=FLAGS.save_interval_secs)
if __name__ == '__main__':
tf.app.run()
| apache-2.0 |
michaelBenin/django-oscar | oscar/apps/catalogue/migrations/0006_auto__add_field_product_is_discountable.py | 18 | 12901 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Product.is_discountable'
db.add_column('catalogue_product', 'is_discountable', self.gf('django.db.models.fields.BooleanField')(default=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'Product.is_discountable'
db.delete_column('catalogue_product', 'is_discountable')
models = {
'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
},
'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'})
},
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'})
},
'catalogue.contributor': {
'Meta': {'object_name': 'Contributor'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'})
},
'catalogue.contributorrole': {
'Meta': {'object_name': 'ContributorRole'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'name_plural': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductClass']", 'null': 'True'}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'db_index': 'True'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'})
},
'catalogue.productcontributor': {
'Meta': {'object_name': 'ProductContributor'},
'contributor': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Contributor']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ContributorRole']", 'null': 'True', 'blank': 'True'})
},
'catalogue.productimage': {
'Meta': {'ordering': "['display_order']", 'unique_together': "(('product', 'display_order'),)", 'object_name': 'ProductImage'},
'caption': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'original': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'images'", 'to': "orm['catalogue.Product']"})
},
'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
}
}
complete_apps = ['catalogue']
| bsd-3-clause |
ovnicraft/odoo | addons/crm/crm_phonecall.py | 255 | 14844 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import crm
from datetime import datetime
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
from openerp.tools.translate import _
class crm_phonecall(osv.osv):
""" Model for CRM phonecalls """
_name = "crm.phonecall"
_description = "Phonecall"
_order = "id desc"
_inherit = ['mail.thread']
_columns = {
'date_action_last': fields.datetime('Last Action', readonly=1),
'date_action_next': fields.datetime('Next Action', readonly=1),
'create_date': fields.datetime('Creation Date' , readonly=True),
'section_id': fields.many2one('crm.case.section', 'Sales Team', \
select=True, help='Sales team to which Case belongs to.'),
'user_id': fields.many2one('res.users', 'Responsible'),
'partner_id': fields.many2one('res.partner', 'Contact'),
'company_id': fields.many2one('res.company', 'Company'),
'description': fields.text('Description'),
'state': fields.selection(
[('open', 'Confirmed'),
('cancel', 'Cancelled'),
('pending', 'Pending'),
('done', 'Held')
], string='Status', readonly=True, track_visibility='onchange',
help='The status is set to Confirmed, when a case is created.\n'
'When the call is over, the status is set to Held.\n'
                 'If the call is not applicable anymore, the status can be set to Cancelled.'),
'email_from': fields.char('Email', size=128, help="These people will receive email."),
'date_open': fields.datetime('Opened', readonly=True),
# phonecall fields
'name': fields.char('Call Summary', required=True),
'active': fields.boolean('Active', required=False),
'duration': fields.float('Duration', help='Duration in minutes and seconds.'),
'categ_id': fields.many2one('crm.case.categ', 'Category', \
domain="['|',('section_id','=',section_id),('section_id','=',False),\
('object_id.model', '=', 'crm.phonecall')]"),
'partner_phone': fields.char('Phone'),
'partner_mobile': fields.char('Mobile'),
'priority': fields.selection([('0','Low'), ('1','Normal'), ('2','High')], 'Priority'),
'date_closed': fields.datetime('Closed', readonly=True),
'date': fields.datetime('Date'),
'opportunity_id': fields.many2one ('crm.lead', 'Lead/Opportunity'),
}
def _get_default_state(self, cr, uid, context=None):
if context and context.get('default_state'):
return context.get('default_state')
return 'open'
_defaults = {
'date': fields.datetime.now,
'priority': '1',
'state': _get_default_state,
'user_id': lambda self, cr, uid, ctx: uid,
'active': 1
}
def on_change_partner_id(self, cr, uid, ids, partner_id, context=None):
values = {}
if partner_id:
partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
values = {
'partner_phone': partner.phone,
'partner_mobile': partner.mobile,
}
return {'value': values}
def write(self, cr, uid, ids, values, context=None):
""" Override to add case management: open/close dates """
if values.get('state'):
if values.get('state') == 'done':
values['date_closed'] = fields.datetime.now()
self.compute_duration(cr, uid, ids, context=context)
elif values.get('state') == 'open':
values['date_open'] = fields.datetime.now()
values['duration'] = 0.0
return super(crm_phonecall, self).write(cr, uid, ids, values, context=context)
def compute_duration(self, cr, uid, ids, context=None):
for phonecall in self.browse(cr, uid, ids, context=context):
if phonecall.duration <= 0:
duration = datetime.now() - datetime.strptime(phonecall.date, DEFAULT_SERVER_DATETIME_FORMAT)
values = {'duration': duration.seconds/float(60)}
self.write(cr, uid, [phonecall.id], values, context=context)
return True
def schedule_another_phonecall(self, cr, uid, ids, schedule_time, call_summary, \
user_id=False, section_id=False, categ_id=False, action='schedule', context=None):
"""
action :('schedule','Schedule a call'), ('log','Log a call')
"""
model_data = self.pool.get('ir.model.data')
phonecall_dict = {}
if not categ_id:
try:
res_id = model_data._get_id(cr, uid, 'crm', 'categ_phone2')
categ_id = model_data.browse(cr, uid, res_id, context=context).res_id
except ValueError:
pass
for call in self.browse(cr, uid, ids, context=context):
if not section_id:
section_id = call.section_id and call.section_id.id or False
if not user_id:
user_id = call.user_id and call.user_id.id or False
if not schedule_time:
schedule_time = call.date
vals = {
'name' : call_summary,
'user_id' : user_id or False,
'categ_id' : categ_id or False,
'description' : call.description or False,
'date' : schedule_time,
'section_id' : section_id or False,
'partner_id': call.partner_id and call.partner_id.id or False,
'partner_phone' : call.partner_phone,
'partner_mobile' : call.partner_mobile,
'priority': call.priority,
'opportunity_id': call.opportunity_id and call.opportunity_id.id or False,
}
new_id = self.create(cr, uid, vals, context=context)
if action == 'log':
self.write(cr, uid, [new_id], {'state': 'done'}, context=context)
phonecall_dict[call.id] = new_id
return phonecall_dict
def _call_create_partner(self, cr, uid, phonecall, context=None):
partner = self.pool.get('res.partner')
partner_id = partner.create(cr, uid, {
'name': phonecall.name,
'user_id': phonecall.user_id.id,
'comment': phonecall.description,
'address': []
})
return partner_id
def on_change_opportunity(self, cr, uid, ids, opportunity_id, context=None):
values = {}
if opportunity_id:
opportunity = self.pool.get('crm.lead').browse(cr, uid, opportunity_id, context=context)
values = {
'section_id' : opportunity.section_id and opportunity.section_id.id or False,
'partner_phone' : opportunity.phone,
'partner_mobile' : opportunity.mobile,
'partner_id' : opportunity.partner_id and opportunity.partner_id.id or False,
}
return {'value' : values}
def _call_set_partner(self, cr, uid, ids, partner_id, context=None):
write_res = self.write(cr, uid, ids, {'partner_id' : partner_id}, context=context)
self._call_set_partner_send_note(cr, uid, ids, context)
return write_res
def _call_create_partner_address(self, cr, uid, phonecall, partner_id, context=None):
address = self.pool.get('res.partner')
return address.create(cr, uid, {
'parent_id': partner_id,
'name': phonecall.name,
'phone': phonecall.partner_phone,
})
def handle_partner_assignation(self, cr, uid, ids, action='create', partner_id=False, context=None):
"""
Handle partner assignation during a lead conversion.
if action is 'create', create new partner with contact and assign lead to new partner_id.
otherwise assign lead to specified partner_id
:param list ids: phonecalls ids to process
:param string action: what has to be done regarding partners (create it, assign an existing one, or nothing)
:param int partner_id: partner to assign if any
:return dict: dictionary organized as followed: {lead_id: partner_assigned_id}
"""
#TODO this is a duplication of the handle_partner_assignation method of crm_lead
partner_ids = {}
# If a partner_id is given, force this partner for all elements
force_partner_id = partner_id
for call in self.browse(cr, uid, ids, context=context):
# If the action is set to 'create' and no partner_id is set, create a new one
if action == 'create':
partner_id = force_partner_id or self._call_create_partner(cr, uid, call, context=context)
self._call_create_partner_address(cr, uid, call, partner_id, context=context)
self._call_set_partner(cr, uid, [call.id], partner_id, context=context)
partner_ids[call.id] = partner_id
return partner_ids
def redirect_phonecall_view(self, cr, uid, phonecall_id, context=None):
model_data = self.pool.get('ir.model.data')
# Select the view
tree_view = model_data.get_object_reference(cr, uid, 'crm', 'crm_case_phone_tree_view')
form_view = model_data.get_object_reference(cr, uid, 'crm', 'crm_case_phone_form_view')
search_view = model_data.get_object_reference(cr, uid, 'crm', 'view_crm_case_phonecalls_filter')
value = {
'name': _('Phone Call'),
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'crm.phonecall',
'res_id' : int(phonecall_id),
'views': [(form_view and form_view[1] or False, 'form'), (tree_view and tree_view[1] or False, 'tree'), (False, 'calendar')],
'type': 'ir.actions.act_window',
'search_view_id': search_view and search_view[1] or False,
}
return value
def convert_opportunity(self, cr, uid, ids, opportunity_summary=False, partner_id=False, planned_revenue=0.0, probability=0.0, context=None):
partner = self.pool.get('res.partner')
opportunity = self.pool.get('crm.lead')
opportunity_dict = {}
default_contact = False
for call in self.browse(cr, uid, ids, context=context):
if not partner_id:
partner_id = call.partner_id and call.partner_id.id or False
if partner_id:
address_id = partner.address_get(cr, uid, [partner_id])['default']
if address_id:
default_contact = partner.browse(cr, uid, address_id, context=context)
opportunity_id = opportunity.create(cr, uid, {
'name': opportunity_summary or call.name,
'planned_revenue': planned_revenue,
'probability': probability,
'partner_id': partner_id or False,
'mobile': default_contact and default_contact.mobile,
'section_id': call.section_id and call.section_id.id or False,
'description': call.description or False,
'priority': call.priority,
'type': 'opportunity',
'phone': call.partner_phone or False,
'email_from': default_contact and default_contact.email,
})
vals = {
'partner_id': partner_id,
'opportunity_id': opportunity_id,
'state': 'done',
}
self.write(cr, uid, [call.id], vals, context=context)
opportunity_dict[call.id] = opportunity_id
return opportunity_dict
def action_make_meeting(self, cr, uid, ids, context=None):
"""
Open meeting's calendar view to schedule a meeting on current phonecall.
:return dict: dictionary value for created meeting view
"""
partner_ids = []
phonecall = self.browse(cr, uid, ids[0], context)
if phonecall.partner_id and phonecall.partner_id.email:
partner_ids.append(phonecall.partner_id.id)
res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'calendar', 'action_calendar_event', context)
res['context'] = {
'default_phonecall_id': phonecall.id,
'default_partner_ids': partner_ids,
'default_user_id': uid,
'default_email_from': phonecall.email_from,
'default_name': phonecall.name,
}
return res
def action_button_convert2opportunity(self, cr, uid, ids, context=None):
"""
Convert a phonecall into an opp and then redirect to the opp view.
:param list ids: list of calls ids to convert (typically contains a single id)
:return dict: containing view information
"""
if len(ids) != 1:
raise osv.except_osv(_('Warning!'),_('It\'s only possible to convert one phonecall at a time.'))
opportunity_dict = self.convert_opportunity(cr, uid, ids, context=context)
return self.pool.get('crm.lead').redirect_opportunity_view(cr, uid, opportunity_dict[ids[0]], context)
# ----------------------------------------
# OpenChatter
# ----------------------------------------
def _call_set_partner_send_note(self, cr, uid, ids, context=None):
return self.message_post(cr, uid, ids, body=_("Partner has been <b>created</b>."), context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
AltSchool/django | django/conf/locale/en/formats.py | 1007 | 1815 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'N j, Y'
TIME_FORMAT = 'P'
DATETIME_FORMAT = 'N j, Y, P'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'F j'
SHORT_DATE_FORMAT = 'm/d/Y'
SHORT_DATETIME_FORMAT = 'm/d/Y P'
FIRST_DAY_OF_WEEK = 0 # Sunday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = [
'%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
# '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
# '%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
# '%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
# '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
]
DATETIME_INPUT_FORMATS = [
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
'%m/%d/%Y', # '10/25/2006'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
'%m/%d/%y', # '10/25/06'
]
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
| bsd-3-clause |
jszymon/pacal | pacal/vartransforms.py | 1 | 10727 | """Variable transforms. Used for mapping to infinite intervals etc."""
from __future__ import print_function
from numpy import Inf
from numpy import hypot, sqrt, sign
from numpy import array, asfarray, empty_like, isscalar, all, equal
class VarTransform(object):
"""Base class for variable transforms."""
def inv_var_change_with_mask(self, t):
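        # Compare every entry of t against the parameter values that represent
        # infinity (self.var_inf); mask is True where t is an ordinary, finite
        # parameter value. Masked-out entries get a placeholder instead of
        # being passed to inv_var_change.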
eq = equal.outer(t, self.var_inf)
mask = ~eq.any(axis=-1)
if (~mask).any():
if isscalar(t):
                x = 0 # t is masked; any placeholder value works, use 0
else:
t = asfarray(t)
x = empty_like(t)
x[mask] = self.inv_var_change(t[mask])
else:
x = self.inv_var_change(t)
return x, mask
def apply_with_inv_transform(self, f, t, def_val = 0, mul_by_deriv = False):
"""Apply function f to vartransform of t.
Accepts vector inputs. Values at infinity are set to def_val."""
x, mask = self.inv_var_change_with_mask(t)
if (~mask).any():
if isscalar(x):
y = def_val
else:
y = empty_like(x)
y[mask] = f(x[mask])
if mul_by_deriv:
y[mask] *= self.inv_var_change_deriv(t[mask])
y[~mask] = def_val
else:
y = f(x)
if mul_by_deriv:
y *= self.inv_var_change_deriv(t)
return y
class VarTransformIdentity(VarTransform):
"""The identity transform."""
def var_change(self, x):
return x
def inv_var_change(self, t):
return t
def inv_var_change_deriv(self, t):
return 1.0
var_min = -1.0
var_max = +1.0
var_inf = [] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
### Variable transforms
class VarTransformReciprocal_PMInf(VarTransform):
"""Reciprocal variable transform."""
def __init__(self, exponent = 1):
self.exponent = exponent
def var_change(self, x):
#if x > 0:
# t = x / (x + 1.0)
#else:
# t = x / (1.0 - x)
t = x / (1.0 + abs(x))
return t
def inv_var_change(self, t):
#if t > 0:
# x = t / (1.0 - t)
#else:
# x = t / (1.0 + t)
x = t / (1.0 - abs(t))
return x
def inv_var_change_deriv(self, t):
return 1.0 / ((1.0 - abs(t)) * (1.0 - abs(t)))
var_min = -1.0
var_max = +1.0
var_inf = [-1.0, +1.0] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
class VarTransformReciprocal_PInf(VarTransform):
"""Reciprocal variable transform.
    Optionally an exponent different from 1 can be specified. If U is
    given, then the transform is onto the finite interval [L, U]."""
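    # Mapping (as implemented): t = (offset / (x - L + offset)) ** (1/exponent),
    # so x = L maps to t = 1 and x -> +oo maps to t -> 0; var_inf = [0] marks
    # the parameter value corresponding to infinity.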
def __init__(self, L = 0, exponent = 1, U = None):
self.exponent = exponent
self.L = L
self.U = U
if self.L == 0:
self.offset = 1.0
else:
self.offset = abs(self.L) / 2
if U is not None:
self.var_min = self.var_change(U)
self.var_inf = []
def var_change(self, x):
#assert all(x >= self.L)
if self.exponent == 1:
t = self.offset / (x - self.L + self.offset)
elif self.exponent == 2:
t = sqrt(self.offset / (x - self.L + self.offset))
else:
t = (self.offset / (x - self.L + self.offset))**(1.0/self.exponent)
return t
def inv_var_change(self, t):
if self.exponent == 1:
x = self.L - self.offset + self.offset / t
else:
x = self.L - self.offset + self.offset / t**self.exponent
return x
def inv_var_change_deriv(self, t):
if self.exponent == 1:
der = self.offset / (t * t)
else:
der = self.offset * float(self.exponent) / t**(self.exponent + 1)
return der
var_min = 0
var_max = 1
var_inf = [0] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
class VarTransformReciprocal_MInf(VarTransform):
"""Reciprocal variable transform.
    Optionally an exponent different from 1 can be specified. If L is
    given, then the transform is onto the finite interval [L, U]."""
def __init__(self, U = 0, exponent = 1, L = None):
self.exponent = exponent
self.L = L
self.U = U
if self.U == 0:
self.offset = 1.0
else:
self.offset = abs(self.U) / 2
if L is not None:
self.var_min = self.var_change(L)
self.var_inf = []
def var_change(self, x):
#assert all(x <= self.U)
if self.exponent == 1:
t = -self.offset / (x - self.U - self.offset)
elif self.exponent == 2:
t = sqrt(-self.offset / (x - self.U - self.offset))
else:
t = (self.offset / abs(x - self.U - self.offset))**(1.0/self.exponent)
return t
def inv_var_change(self, t):
if self.exponent == 1:
x = self.U + self.offset - self.offset / t
elif self.exponent == 2:
x = self.U + self.offset - self.offset / (t*t)
else:
x = self.U + self.offset - self.offset / t**self.exponent
return x
def inv_var_change_deriv(self, t):
if self.exponent == 1:
der = self.offset / (t * t)
else:
der = self.offset * float(self.exponent) / t**(self.exponent + 1)
return der
var_min = 0
var_max = 1
var_inf = [0] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
# variable transforms suggested by Boyd
class VarTransformAlgebraic_PMInf(VarTransform):
"""Variable transform suggested by Boyd.
Leads to Chebyshev rational functions."""
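    # Mapping (as implemented): t = x / hypot(c, x) = x / sqrt(c**2 + x**2),
    # which sends the whole real line onto (-1, 1); c is Boyd's map parameter.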
def __init__(self, c = 1):
self.c = c # this corresponds to Boyd's L param
def var_change(self, x):
t = x / hypot(self.c, x)
return t
def inv_var_change(self, t):
x = self.c * t / sqrt(1.0 - t*t)
return x
def inv_var_change_deriv(self, t):
t2 = t * t
der = t2 / sqrt((1.0 - t2)**3) + 1.0 / sqrt(1.0 - t2)
return self.c * der
var_min = -1.0
var_max = +1.0
var_inf = [-1.0, +1.0] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
class VarTransformAlgebraic_PInf(VarTransform):
"""Variable transform suggested by Boyd."""
def __init__(self, L = 0, c = 1):
self.L = float(L) # lower bound
self.c = c # this corresponds to Boyd's L param
def var_change(self, x):
#assert all(x >= self.L)
if ~all(x >= self.L):
print("assert all(x >= self.L)")
print(x)
print(x < self.L)
t = (x - self.L - self.c) / (x - self.L + self.c)
return t
def inv_var_change(self, t):
x = self.L + self.c * (1.0 + t) / (1.0 - t)
return x
def inv_var_change_deriv(self, t):
der = 2.0 * self.c / (1.0 - t)**2
return der
var_min = -1.0
var_max = +1.0
var_inf = [+1.0] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
class VarTransformAlgebraic_MInf(VarTransform):
"""Variable transform suggested by Boyd."""
def __init__(self, U = 0, c = 1):
self.U = float(U) # upper bound
self.c = c # this corresponds to Boyd's L param
def var_change(self, x):
#assert all(x <= self.U)
if ~all(x <= self.U):
print("assert all(x >= self.L)")
print(x)
print(x < self.U)
t = (-(x - self.U) - self.c) / (-(x - self.U) + self.c)
return t
def inv_var_change(self, t):
x = self.U - self.c * (1.0 + t) / (1.0 - t)
return x
def inv_var_change_deriv(self, t):
der = 2.0 * self.c / (1.0 - t)**2
return der
var_min = -1.0
var_max = +1.0
var_inf = [+1.0] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
def plot_transformed(f, vt):
"""A debugging plot of f under variable transfom vt."""
from pylab import plot, show, linspace
T = linspace(vt.var_min, vt.var_max, 1000)
Y = [f(vt.inv_var_change(t)) if t not in vt.var_inf else 0 for t in T]
plot(T, Y, linewidth=5)
def plot_transformed_w_deriv(f, vt):
"""A debugging plot of f under variable transfom vt including the
derivative of inverse transform."""
from pylab import plot, show, linspace
T = linspace(vt.var_min, vt.var_max, 1000)
Y = [f(vt.inv_var_change(t))*vt.inv_var_change_deriv(t) if t not in vt.var_inf else 0 for t in T]
plot(T, Y, linewidth=5)
def plot_invtransformed_tail(f, vt):
from pylab import loglog, show, logspace
X = logspace(1, 50, 1000)
Y = f(vt.var_change(X))
loglog(X, Y)
if __name__ == "__main__":
vt = VarTransformAlgebraic_PMInf()
print(vt.inv_var_change_with_mask(array([-1,0,1])))
print(vt.inv_var_change_with_mask(-1))
print(vt.apply_with_inv_transform(lambda x: x+1, array([-1,0,1])))
print(vt.apply_with_inv_transform(lambda x: x+1, 0))
print(vt.apply_with_inv_transform(lambda x: x+1, -1))
from numpy import exp
from pylab import show
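    # Illustrative extra check (not part of the original demo): evaluate a
    # Gaussian through the transform with the change-of-variable weight applied.
    print(vt.apply_with_inv_transform(lambda x: exp(-x * x), array([-0.5, 0.0, 0.5]), mul_by_deriv=True))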
#plot_transformed(lambda x: 1.0/(1+x*x), VarTransformAlgebraic_PInf(1))
#plot_transformed(lambda x: exp(-x*x), VarTransformAlgebraic_PMInf())
#plot_transformed_w_deriv(lambda x: 1.0/(1+x*x), VarTransformAlgebraic_PMInf())
#plot_transformed_w_deriv(lambda x: exp(-x*x), VarTransformAlgebraic_PMInf())
#plot_transformed(lambda x: 1.0/(1+x*x), VarTransformReciprocal_PInf())
#plot_transformed(lambda x: exp(-x*x), VarTransformReciprocal_PInf())
#plot_transformed(lambda x: 1.0/(1+x**1.0), VarTransformReciprocal_PInf())
#plot_transformed(lambda x: 1.0/(1+x**1.2), VarTransformReciprocal_PInf())
#plot_transformed(lambda x: 1.0/(1+x**1.5), VarTransformReciprocal_PInf())
#plot_transformed(lambda x: 1.0/(1+x**2.0), VarTransformReciprocal_PInf())
#plot_transformed(lambda x: 1.0/(1+x**2.0), VarTransformIdentity())
#plot_transformed(lambda x: 1.0/(1+x**2.0), VarTransformReciprocal_PInf(U = 2))
#plot_transformed(lambda x: 1.0/(1+x**2.0), VarTransformReciprocal_MInf())
#plot_transformed(lambda x: 1.0/(1+x**2.0), VarTransformReciprocal_MInf(L = -2))
plot_invtransformed_tail(lambda x: x, VarTransformReciprocal_PInf(L = 10))
plot_invtransformed_tail(lambda x: 1-x, VarTransformAlgebraic_PInf(L = 10))
show()
| gpl-3.0 |
cwtaylor/viper | viper/modules/pymacho/MachOEncryptionInfoCommand.py | 6 | 1864 | # encoding: utf-8
"""
Copyright 2013 Jérémie BOUTOILLE
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from struct import pack, unpack
from viper.modules.pymacho.MachOLoadCommand import MachOLoadCommand
from viper.modules.pymacho.Utils import green
class MachOEncryptionInfoCommand(MachOLoadCommand):
cryptoff = 0
cryptsize = 0
cryptid = 0
def __init__(self, macho_file=None, cmd=0):
self.cmd = cmd
if macho_file is not None:
self.parse(macho_file)
def parse(self, macho_file):
self.cryptoff, self.cryptsize = unpack('<II', macho_file.read(4*2))
self.cryptid = unpack('<I', macho_file.read(4))[0]
def write(self, macho_file):
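        # Write the command header with a placeholder cmdsize of 0, then the
        # payload; afterwards seek back to the cmdsize slot (4 bytes into the
        # command) and patch in the real size computed from the file offsets.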
before = macho_file.tell()
macho_file.write(pack('<II', self.cmd, 0x0))
macho_file.write(pack('<III', self.cryptoff, self.cryptsize, self.cryptid))
after = macho_file.tell()
macho_file.seek(before+4)
macho_file.write(pack('<I', after-before))
macho_file.seek(after)
def display(self, before=''):
print before + green("[+]")+" LC_ENCRYPTION_INFO"
print before + "\t- cryptoff : 0x%x" % self.cryptoff
print before + "\t- cryptsize : 0x%x" % self.cryptsize
        print before + "\t- cryptid : 0x%x" % self.cryptid
| bsd-3-clause |
node-modules/emoji | bin/create_emoji_js.py | 6 | 2090 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# install pyquery first: $ sudo easy_install pyquery
import os
from pyquery import PyQuery as pq
project_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# d = pq(url='https://raw.github.com/iamcal/php-emoji/master/table.htm')
d = pq(open(project_dir + '/lib/table.htm', 'rb').read())
tr = d('table tr')
content = open(project_dir + '/tpl/emoji_header.js', 'rb').read().decode('utf-8')
for tre in tr[1:]:
tds = pq(tre)('td')
# val, name, Unified DoCoMo KDDI Softbank Google
item = ['', '', '', '', '', '', '']
for index, tde in enumerate(tds):
td = pq(tde)
# <td><span class="emoji emoji2320e3"></span></td>
if index == 0:
val = td('span').attr('class')[11:].decode('utf-8')
else:
val = td.text().decode('utf-8')
source = val
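        # Columns other than the name hold code points like "U+1F1EF U+1F1F5";
        # strip the leading "U+", zero-pad each hex value to 8 digits and decode
        # the resulting r'\U????????' escape(s) into actual unicode characters.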
if index != 1 and val != '-':
# convert to str
val = val[2:]
val = val.split(' U+')
val[0] = (r'\U' + '0' * (8 - len(val[0])) + val[0].lower()).decode('unicode-escape')
if len(val) > 1:
val[1] = (r'\U' + '0' * (8 - len(val[1])) + val[1].lower()).decode('unicode-escape')
val = val[0] + val[1]
else:
val = val[0]
if index > 1:
val = [val, source]
item[index] = val
# print item
# unified: [unified_unicode, name, classname, docomo, kddi, softbank, google]
content += u' "' + item[2][0] + '": ["' + item[2][1] + '", "' + item[1] + '", "' + item[0] + '", ["' \
+ item[3][0] + '", "' + item[3][1] + '"], ["' \
+ item[4][0] + '", "' + item[4][1] + '"], ["' \
+ item[5][0] + '", "' + item[5][1] + '"], ["' \
+ item[6][0] + '", "' + item[6][1] + '"]],\n'
content = content[:-2] + u'\n};\n\n'
content += open(project_dir + '/tpl/emoji_footer.js', 'rb').read().decode('utf-8')
f = open(project_dir + '/lib/emoji.js', 'wb')
f.write(content.encode('utf-8'))
f.close()
| mit |
5t111111/markdown-preview.vim | markdownpreview_lib/pygments/styles/bw.py | 364 | 1355 | # -*- coding: utf-8 -*-
"""
pygments.styles.bw
~~~~~~~~~~~~~~~~~~
Simple black/white only style.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Operator, Generic
class BlackWhiteStyle(Style):
background_color = "#ffffff"
default_style = ""
styles = {
Comment: "italic",
Comment.Preproc: "noitalic",
Keyword: "bold",
Keyword.Pseudo: "nobold",
Keyword.Type: "nobold",
Operator.Word: "bold",
Name.Class: "bold",
Name.Namespace: "bold",
Name.Exception: "bold",
Name.Entity: "bold",
Name.Tag: "bold",
String: "italic",
String.Interpol: "bold",
String.Escape: "bold",
Generic.Heading: "bold",
Generic.Subheading: "bold",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold",
Error: "border:#FF0000"
}
| lgpl-2.1 |
mhnatiuk/phd_sociology_of_religion | scrapper/lib/python2.7/site-packages/twisted/conch/insults/helper.py | 30 | 14146 | # -*- test-case-name: twisted.conch.test.test_helper -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Partial in-memory terminal emulator
@author: Jp Calderone
"""
import re, string
from zope.interface import implements
from twisted.internet import defer, protocol, reactor
from twisted.python import log, _textattributes
from twisted.python.deprecate import deprecated, deprecatedModuleAttribute
from twisted.python.versions import Version
from twisted.conch.insults import insults
FOREGROUND = 30
BACKGROUND = 40
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, N_COLORS = range(9)
class _FormattingState(_textattributes._FormattingStateMixin):
"""
Represents the formatting state/attributes of a single character.
Character set, intensity, underlinedness, blinkitude, video
reversal, as well as foreground and background colors made up a
character's attributes.
"""
compareAttributes = (
'charset', 'bold', 'underline', 'blink', 'reverseVideo', 'foreground',
'background', '_subtracting')
def __init__(self, charset=insults.G0, bold=False, underline=False,
blink=False, reverseVideo=False, foreground=WHITE,
background=BLACK, _subtracting=False):
self.charset = charset
self.bold = bold
self.underline = underline
self.blink = blink
self.reverseVideo = reverseVideo
self.foreground = foreground
self.background = background
self._subtracting = _subtracting
@deprecated(Version('Twisted', 13, 1, 0))
def wantOne(self, **kw):
"""
Add a character attribute to a copy of this formatting state.
@param **kw: An optional attribute name and value can be provided with
a keyword argument.
@return: A formatting state instance with the new attribute.
@see: L{DefaultFormattingState._withAttribute}.
"""
k, v = kw.popitem()
return self._withAttribute(k, v)
def toVT102(self):
# Spit out a vt102 control sequence that will set up
# all the attributes set here. Except charset.
attrs = []
if self._subtracting:
attrs.append(0)
if self.bold:
attrs.append(insults.BOLD)
if self.underline:
attrs.append(insults.UNDERLINE)
if self.blink:
attrs.append(insults.BLINK)
if self.reverseVideo:
attrs.append(insults.REVERSE_VIDEO)
if self.foreground != WHITE:
attrs.append(FOREGROUND + self.foreground)
if self.background != BLACK:
attrs.append(BACKGROUND + self.background)
if attrs:
return '\x1b[' + ';'.join(map(str, attrs)) + 'm'
return ''
CharacterAttribute = _FormattingState
deprecatedModuleAttribute(
Version('Twisted', 13, 1, 0),
'Use twisted.conch.insults.text.assembleFormattedText instead.',
'twisted.conch.insults.helper',
'CharacterAttribute')
# XXX - need to support scroll regions and scroll history
class TerminalBuffer(protocol.Protocol):
"""
An in-memory terminal emulator.
"""
implements(insults.ITerminalTransport)
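    # Build a unique sentinel object for every special key; terminal code can
    # then compare received keys against these identities (e.g. buf.UP_ARROW).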
for keyID in ('UP_ARROW', 'DOWN_ARROW', 'RIGHT_ARROW', 'LEFT_ARROW',
'HOME', 'INSERT', 'DELETE', 'END', 'PGUP', 'PGDN',
'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8', 'F9',
'F10', 'F11', 'F12'):
exec '%s = object()' % (keyID,)
TAB = '\t'
BACKSPACE = '\x7f'
width = 80
height = 24
fill = ' '
void = object()
def getCharacter(self, x, y):
return self.lines[y][x]
def connectionMade(self):
self.reset()
def write(self, bytes):
"""
Add the given printable bytes to the terminal.
Line feeds in C{bytes} will be replaced with carriage return / line
feed pairs.
"""
for b in bytes.replace('\n', '\r\n'):
self.insertAtCursor(b)
def _currentFormattingState(self):
return _FormattingState(self.activeCharset, **self.graphicRendition)
def insertAtCursor(self, b):
"""
Add one byte to the terminal at the cursor and make consequent state
updates.
If b is a carriage return, move the cursor to the beginning of the
current row.
If b is a line feed, move the cursor to the next row or scroll down if
the cursor is already in the last row.
Otherwise, if b is printable, put it at the cursor position (inserting
or overwriting as dictated by the current mode) and move the cursor.
"""
if b == '\r':
self.x = 0
elif b == '\n':
self._scrollDown()
elif b in string.printable:
if self.x >= self.width:
self.nextLine()
ch = (b, self._currentFormattingState())
if self.modes.get(insults.modes.IRM):
self.lines[self.y][self.x:self.x] = [ch]
self.lines[self.y].pop()
else:
self.lines[self.y][self.x] = ch
self.x += 1
def _emptyLine(self, width):
return [(self.void, self._currentFormattingState())
for i in xrange(width)]
def _scrollDown(self):
self.y += 1
if self.y >= self.height:
self.y -= 1
del self.lines[0]
self.lines.append(self._emptyLine(self.width))
def _scrollUp(self):
self.y -= 1
if self.y < 0:
self.y = 0
del self.lines[-1]
self.lines.insert(0, self._emptyLine(self.width))
def cursorUp(self, n=1):
self.y = max(0, self.y - n)
def cursorDown(self, n=1):
self.y = min(self.height - 1, self.y + n)
def cursorBackward(self, n=1):
self.x = max(0, self.x - n)
def cursorForward(self, n=1):
self.x = min(self.width, self.x + n)
def cursorPosition(self, column, line):
self.x = column
self.y = line
def cursorHome(self):
self.x = self.home.x
self.y = self.home.y
def index(self):
self._scrollDown()
def reverseIndex(self):
self._scrollUp()
def nextLine(self):
"""
Update the cursor position attributes and scroll down if appropriate.
"""
self.x = 0
self._scrollDown()
def saveCursor(self):
self._savedCursor = (self.x, self.y)
def restoreCursor(self):
self.x, self.y = self._savedCursor
del self._savedCursor
def setModes(self, modes):
for m in modes:
self.modes[m] = True
def resetModes(self, modes):
for m in modes:
try:
del self.modes[m]
except KeyError:
pass
def setPrivateModes(self, modes):
"""
Enable the given modes.
Track which modes have been enabled so that the implementations of
other L{insults.ITerminalTransport} methods can be properly implemented
to respect these settings.
@see: L{resetPrivateModes}
@see: L{insults.ITerminalTransport.setPrivateModes}
"""
for m in modes:
self.privateModes[m] = True
def resetPrivateModes(self, modes):
"""
Disable the given modes.
@see: L{setPrivateModes}
@see: L{insults.ITerminalTransport.resetPrivateModes}
"""
for m in modes:
try:
del self.privateModes[m]
except KeyError:
pass
def applicationKeypadMode(self):
self.keypadMode = 'app'
def numericKeypadMode(self):
self.keypadMode = 'num'
def selectCharacterSet(self, charSet, which):
self.charsets[which] = charSet
def shiftIn(self):
self.activeCharset = insults.G0
def shiftOut(self):
self.activeCharset = insults.G1
def singleShift2(self):
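        # Temporarily select the G2 character set for exactly one character:
        # wrap insertAtCursor so that after the next byte is inserted the
        # instance-level wrapper removes itself and the old charset is restored.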
oldActiveCharset = self.activeCharset
self.activeCharset = insults.G2
f = self.insertAtCursor
def insertAtCursor(b):
f(b)
del self.insertAtCursor
self.activeCharset = oldActiveCharset
self.insertAtCursor = insertAtCursor
def singleShift3(self):
oldActiveCharset = self.activeCharset
self.activeCharset = insults.G3
f = self.insertAtCursor
def insertAtCursor(b):
f(b)
del self.insertAtCursor
self.activeCharset = oldActiveCharset
self.insertAtCursor = insertAtCursor
def selectGraphicRendition(self, *attributes):
for a in attributes:
if a == insults.NORMAL:
self.graphicRendition = {
'bold': False,
'underline': False,
'blink': False,
'reverseVideo': False,
'foreground': WHITE,
'background': BLACK}
elif a == insults.BOLD:
self.graphicRendition['bold'] = True
elif a == insults.UNDERLINE:
self.graphicRendition['underline'] = True
elif a == insults.BLINK:
self.graphicRendition['blink'] = True
elif a == insults.REVERSE_VIDEO:
self.graphicRendition['reverseVideo'] = True
else:
try:
v = int(a)
except ValueError:
log.msg("Unknown graphic rendition attribute: " + repr(a))
else:
if FOREGROUND <= v <= FOREGROUND + N_COLORS:
self.graphicRendition['foreground'] = v - FOREGROUND
elif BACKGROUND <= v <= BACKGROUND + N_COLORS:
self.graphicRendition['background'] = v - BACKGROUND
else:
log.msg("Unknown graphic rendition attribute: " + repr(a))
def eraseLine(self):
self.lines[self.y] = self._emptyLine(self.width)
def eraseToLineEnd(self):
width = self.width - self.x
self.lines[self.y][self.x:] = self._emptyLine(width)
def eraseToLineBeginning(self):
self.lines[self.y][:self.x + 1] = self._emptyLine(self.x + 1)
def eraseDisplay(self):
self.lines = [self._emptyLine(self.width) for i in xrange(self.height)]
def eraseToDisplayEnd(self):
self.eraseToLineEnd()
height = self.height - self.y - 1
self.lines[self.y + 1:] = [self._emptyLine(self.width) for i in range(height)]
def eraseToDisplayBeginning(self):
self.eraseToLineBeginning()
self.lines[:self.y] = [self._emptyLine(self.width) for i in range(self.y)]
def deleteCharacter(self, n=1):
del self.lines[self.y][self.x:self.x+n]
self.lines[self.y].extend(self._emptyLine(min(self.width - self.x, n)))
def insertLine(self, n=1):
self.lines[self.y:self.y] = [self._emptyLine(self.width) for i in range(n)]
del self.lines[self.height:]
def deleteLine(self, n=1):
del self.lines[self.y:self.y+n]
self.lines.extend([self._emptyLine(self.width) for i in range(n)])
def reportCursorPosition(self):
return (self.x, self.y)
def reset(self):
self.home = insults.Vector(0, 0)
self.x = self.y = 0
self.modes = {}
self.privateModes = {}
self.setPrivateModes([insults.privateModes.AUTO_WRAP,
insults.privateModes.CURSOR_MODE])
self.numericKeypad = 'app'
self.activeCharset = insults.G0
self.graphicRendition = {
'bold': False,
'underline': False,
'blink': False,
'reverseVideo': False,
'foreground': WHITE,
'background': BLACK}
self.charsets = {
insults.G0: insults.CS_US,
insults.G1: insults.CS_US,
insults.G2: insults.CS_ALTERNATE,
insults.G3: insults.CS_ALTERNATE_SPECIAL}
self.eraseDisplay()
def unhandledControlSequence(self, buf):
print 'Could not handle', repr(buf)
def __str__(self):
lines = []
for L in self.lines:
buf = []
length = 0
for (ch, attr) in L:
if ch is not self.void:
buf.append(ch)
length = len(buf)
else:
buf.append(self.fill)
lines.append(''.join(buf[:length]))
return '\n'.join(lines)
class ExpectationTimeout(Exception):
pass
class ExpectableBuffer(TerminalBuffer):
_mark = 0
def connectionMade(self):
TerminalBuffer.connectionMade(self)
self._expecting = []
def write(self, bytes):
TerminalBuffer.write(self, bytes)
self._checkExpected()
def cursorHome(self):
TerminalBuffer.cursorHome(self)
self._mark = 0
def _timeoutExpected(self, d):
d.errback(ExpectationTimeout())
self._checkExpected()
def _checkExpected(self):
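        # Scan the terminal contents accumulated since the last match mark;
        # drop expectations whose timeout already fired, and for the first
        # pending pattern that matches, cancel its timer, advance the mark and
        # fire its Deferred with the regex match object.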
s = str(self)[self._mark:]
while self._expecting:
expr, timer, deferred = self._expecting[0]
if timer and not timer.active():
del self._expecting[0]
continue
for match in expr.finditer(s):
if timer:
timer.cancel()
del self._expecting[0]
self._mark += match.end()
s = s[match.end():]
deferred.callback(match)
break
else:
return
def expect(self, expression, timeout=None, scheduler=reactor):
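        # Returns a Deferred that fires with the regex match object once the
        # given expression appears in the terminal output; when a timeout is
        # given, the Deferred is errbacked with ExpectationTimeout instead.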
d = defer.Deferred()
timer = None
if timeout:
timer = scheduler.callLater(timeout, self._timeoutExpected, d)
self._expecting.append((re.compile(expression), timer, d))
self._checkExpected()
return d
__all__ = [
'CharacterAttribute', 'TerminalBuffer', 'ExpectableBuffer']
| gpl-2.0 |
thiagopnts/servo | tests/wpt/web-platform-tests/webdriver/tests/actions/mouse.py | 3 | 4720 | import pytest
from tests.actions.support.mouse import get_center
from tests.actions.support.refine import get_events, filter_dict
from tests.support.asserts import assert_move_to_coordinates
from tests.support.inline import inline
from tests.support.wait import wait
def link_doc(dest):
content = "<a href=\"{}\" id=\"link\">destination</a>".format(dest)
return inline(content)
# TODO use pytest.approx once we upgrade to pytest > 3.0
def approx(n, m, tolerance=1):
return abs(n - m) <= tolerance
def test_click_at_coordinates(session, test_actions_page, mouse_chain):
div_point = {
"x": 82,
"y": 187,
}
mouse_chain \
.pointer_move(div_point["x"], div_point["y"], duration=1000) \
.click() \
.perform()
events = get_events(session)
assert len(events) == 4
assert_move_to_coordinates(div_point, "outer", events)
for e in events:
if e["type"] != "mousedown":
assert e["buttons"] == 0
assert e["button"] == 0
expected = [
{"type": "mousedown", "buttons": 1},
{"type": "mouseup", "buttons": 0},
{"type": "click", "buttons": 0},
]
filtered_events = [filter_dict(e, expected[0]) for e in events]
assert expected == filtered_events[1:]
def test_context_menu_at_coordinates(session, test_actions_page, mouse_chain):
div_point = {
"x": 82,
"y": 187,
}
mouse_chain \
.pointer_move(div_point["x"], div_point["y"]) \
.pointer_down(button=2) \
.pointer_up(button=2) \
.perform()
events = get_events(session)
expected = [
{"type": "mousedown", "button": 2},
{"type": "contextmenu", "button": 2},
]
assert len(events) == 4
filtered_events = [filter_dict(e, expected[0]) for e in events]
mousedown_contextmenu_events = [
x for x in filtered_events
if x["type"] in ["mousedown", "contextmenu"]
]
assert expected == mousedown_contextmenu_events
def test_click_element_center(session, test_actions_page, mouse_chain):
outer = session.find.css("#outer", all=False)
center = get_center(outer.rect)
mouse_chain.click(element=outer).perform()
events = get_events(session)
assert len(events) == 4
event_types = [e["type"] for e in events]
assert ["mousemove", "mousedown", "mouseup", "click"] == event_types
for e in events:
if e["type"] != "mousemove":
assert approx(e["pageX"], center["x"])
assert approx(e["pageY"], center["y"])
assert e["target"] == "outer"
def test_click_navigation(session, url, release_actions):
destination = url("/webdriver/tests/actions/support/test_actions_wdspec.html")
start = link_doc(destination)
def click(link):
mouse_chain = session.actions.sequence(
"pointer", "pointer_id", {"pointerType": "mouse"})
mouse_chain.click(element=link).perform()
session.url = start
error_message = "Did not navigate to %s" % destination
click(session.find.css("#link", all=False))
wait(session, lambda s: s.url == destination, error_message)
# repeat steps to check behaviour after document unload
session.url = start
click(session.find.css("#link", all=False))
wait(session, lambda s: s.url == destination, error_message)
@pytest.mark.parametrize("drag_duration", [0, 300, 800])
@pytest.mark.parametrize("dx, dy",
[(20, 0), (0, 15), (10, 15), (-20, 0), (10, -15), (-10, -15)])
def test_drag_and_drop(session,
test_actions_page,
mouse_chain,
dx,
dy,
drag_duration):
drag_target = session.find.css("#dragTarget", all=False)
initial_rect = drag_target.rect
initial_center = get_center(initial_rect)
# Conclude chain with extra move to allow time for last queued
# coordinate-update of drag_target and to test that drag_target is "dropped".
mouse_chain \
.pointer_move(0, 0, origin=drag_target) \
.pointer_down() \
.pointer_move(dx, dy, duration=drag_duration, origin="pointer") \
.pointer_up() \
.pointer_move(80, 50, duration=100, origin="pointer") \
.perform()
# mouseup that ends the drag is at the expected destination
e = get_events(session)[1]
assert e["type"] == "mouseup"
assert approx(e["pageX"], initial_center["x"] + dx)
assert approx(e["pageY"], initial_center["y"] + dy)
# check resulting location of the dragged element
final_rect = drag_target.rect
assert initial_rect["x"] + dx == final_rect["x"]
assert initial_rect["y"] + dy == final_rect["y"]
| mpl-2.0 |
johnny-bui/pygments-sablecc | pygments/styles/borland.py | 75 | 1562 | # -*- coding: utf-8 -*-
"""
pygments.styles.borland
~~~~~~~~~~~~~~~~~~~~~~~
Style similar to the style used in the Borland IDEs.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class BorlandStyle(Style):
"""
Style similar to the style used in the borland IDEs.
"""
default_style = ''
styles = {
Whitespace: '#bbbbbb',
Comment: 'italic #008800',
Comment.Preproc: 'noitalic #008080',
Comment.Special: 'noitalic bold',
String: '#0000FF',
String.Char: '#800080',
Number: '#0000FF',
Keyword: 'bold #000080',
Operator.Word: 'bold',
Name.Tag: 'bold #000080',
Name.Attribute: '#FF0000',
Generic.Heading: '#999999',
Generic.Subheading: '#aaaaaa',
Generic.Deleted: 'bg:#ffdddd #000000',
Generic.Inserted: 'bg:#ddffdd #000000',
Generic.Error: '#aa0000',
Generic.Emph: 'italic',
Generic.Strong: 'bold',
Generic.Prompt: '#555555',
Generic.Output: '#888888',
Generic.Traceback: '#aa0000',
Error: 'bg:#e3d2d2 #a61717'
}
| bsd-2-clause |
rahuldhote/odoo | addons/crm_partner_assign/report/crm_lead_report.py | 309 | 5104 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
from openerp import tools
from openerp.addons.crm import crm
class crm_lead_report_assign(osv.osv):
""" CRM Lead Report """
_name = "crm.lead.report.assign"
_auto = False
_description = "CRM Lead Report"
_columns = {
'partner_assigned_id':fields.many2one('res.partner', 'Partner', readonly=True),
'grade_id':fields.many2one('res.partner.grade', 'Grade', readonly=True),
'user_id':fields.many2one('res.users', 'User', readonly=True),
'country_id':fields.many2one('res.country', 'Country', readonly=True),
'section_id':fields.many2one('crm.case.section', 'Sales Team', readonly=True),
'company_id': fields.many2one('res.company', 'Company', readonly=True),
'date_assign': fields.date('Assign Date', readonly=True),
'create_date': fields.datetime('Create Date', readonly=True),
'delay_open': fields.float('Delay to Assign',digits=(16,2),readonly=True, group_operator="avg",help="Number of Days to open the case"),
'delay_close': fields.float('Delay to Close',digits=(16,2),readonly=True, group_operator="avg",help="Number of Days to close the case"),
'delay_expected': fields.float('Overpassed Deadline',digits=(16,2),readonly=True, group_operator="avg"),
'probability': fields.float('Avg Probability',digits=(16,2),readonly=True, group_operator="avg"),
'probability_max': fields.float('Max Probability',digits=(16,2),readonly=True, group_operator="max"),
'planned_revenue': fields.float('Planned Revenue',digits=(16,2),readonly=True),
'probable_revenue': fields.float('Probable Revenue', digits=(16,2),readonly=True),
        'stage_id': fields.many2one('crm.case.stage', 'Stage', domain="[('section_ids', '=', section_id)]"),
'partner_id': fields.many2one('res.partner', 'Customer' , readonly=True),
'opening_date': fields.datetime('Opening Date', readonly=True),
'date_closed': fields.datetime('Close Date', readonly=True),
'nbr': fields.integer('# of Cases', readonly=True), # TDE FIXME master: rename into nbr_cases
'priority': fields.selection(crm.AVAILABLE_PRIORITIES, 'Priority'),
'type':fields.selection([
('lead','Lead'),
('opportunity','Opportunity')
],'Type', help="Type is used to separate Leads and Opportunities"),
}
def init(self, cr):
"""
CRM Lead Report
@param cr: the current row, from the database cursor
"""
tools.drop_view_if_exists(cr, 'crm_lead_report_assign')
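        # Recreate the reporting view from scratch; the delay_* columns are
        # expressed in days by dividing epoch-second differences by 3600*24.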
cr.execute("""
CREATE OR REPLACE VIEW crm_lead_report_assign AS (
SELECT
c.id,
c.date_open as opening_date,
c.date_closed as date_closed,
c.date_assign,
c.user_id,
c.probability,
c.probability as probability_max,
c.stage_id,
c.type,
c.company_id,
c.priority,
c.section_id,
c.partner_id,
c.country_id,
c.planned_revenue,
c.partner_assigned_id,
p.grade_id,
p.date as partner_date,
c.planned_revenue*(c.probability/100) as probable_revenue,
1 as nbr,
c.create_date as create_date,
extract('epoch' from (c.write_date-c.create_date))/(3600*24) as delay_close,
extract('epoch' from (c.date_deadline - c.date_closed))/(3600*24) as delay_expected,
extract('epoch' from (c.date_open-c.create_date))/(3600*24) as delay_open
FROM
crm_lead c
left join res_partner p on (c.partner_assigned_id=p.id)
)""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
fedorpatlin/ansible | lib/ansible/modules/system/osx_defaults.py | 66 | 14482 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, GeekChimp - Franck Nijhof <franck@geekchimp.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: osx_defaults
author: Franck Nijhof (@frenck)
short_description: osx_defaults allows users to read, write, and delete Mac OS X user defaults from Ansible
description:
- osx_defaults allows users to read, write, and delete Mac OS X user defaults from Ansible scripts.
Mac OS X applications and other programs use the defaults system to record user preferences and other
information that must be maintained when the applications aren't running (such as default font for new
documents, or the position of an Info panel).
version_added: "2.0"
options:
domain:
description:
- The domain is a domain name of the form com.companyname.appname.
required: false
default: NSGlobalDomain
host:
description:
- The host on which the preference should apply. The special value "currentHost" corresponds to the
"-currentHost" switch of the defaults commandline tool.
required: false
default: null
version_added: "2.1"
key:
description:
- The key of the user preference
required: true
type:
description:
- The type of value to write.
required: false
default: string
choices: [ "array", "bool", "boolean", "date", "float", "int", "integer", "string" ]
array_add:
description:
- Add new elements to the array for a key which has an array as its value.
required: false
default: false
choices: [ "true", "false" ]
value:
description:
- The value to write. Only required when state = present.
required: false
default: null
state:
description:
- The state of the user defaults
required: false
default: present
choices: [ "present", "absent" ]
notes:
- Apple Mac caches defaults. You may need to logout and login to apply the changes.
'''
EXAMPLES = '''
- osx_defaults:
domain: com.apple.Safari
key: IncludeInternalDebugMenu
type: bool
value: true
state: present
- osx_defaults:
domain: NSGlobalDomain
key: AppleMeasurementUnits
type: string
value: Centimeters
state: present
- osx_defaults:
domain: com.apple.screensaver
host: currentHost
key: showClock
type: int
value: 1
- osx_defaults:
key: AppleMeasurementUnits
type: string
value: Centimeters
- osx_defaults:
key: AppleLanguages
type: array
value:
- en
- nl
- osx_defaults:
domain: com.geekchimp.macable
key: ExampleKeyToRemove
state: absent
'''
import datetime
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
# exceptions --------------------------------------------------------------- {{{
class OSXDefaultsException(Exception):
pass
# /exceptions -------------------------------------------------------------- }}}
# class MacDefaults -------------------------------------------------------- {{{
class OSXDefaults(object):
""" Class to manage Mac OS user defaults """
# init ---------------------------------------------------------------- {{{
""" Initialize this module. Finds 'defaults' executable and preps the parameters """
def __init__(self, **kwargs):
# Initial var for storing current defaults value
self.current_value = None
# Just set all given parameters
for key, val in kwargs.items():
setattr(self, key, val)
# Try to find the defaults executable
self.executable = self.module.get_bin_path(
'defaults',
required=False,
opt_dirs=self.path.split(':'),
)
if not self.executable:
raise OSXDefaultsException("Unable to locate defaults executable.")
# When state is present, we require a parameter
if self.state == "present" and self.value is None:
raise OSXDefaultsException("Missing value parameter")
# Ensure the value is the correct type
self.value = self._convert_type(self.type, self.value)
# /init --------------------------------------------------------------- }}}
# tools --------------------------------------------------------------- {{{
""" Converts value to given type """
def _convert_type(self, type, value):
if type == "string":
return str(value)
elif type in ["bool", "boolean"]:
if isinstance(value, basestring):
value = value.lower()
if value in [True, 1, "true", "1", "yes"]:
return True
elif value in [False, 0, "false", "0", "no"]:
return False
raise OSXDefaultsException("Invalid boolean value: {0}".format(repr(value)))
elif type == "date":
try:
return datetime.datetime.strptime(value.split("+")[0].strip(), "%Y-%m-%d %H:%M:%S")
except ValueError:
raise OSXDefaultsException(
"Invalid date value: {0}. Required format yyy-mm-dd hh:mm:ss.".format(repr(value))
)
elif type in ["int", "integer"]:
if not str(value).isdigit():
raise OSXDefaultsException("Invalid integer value: {0}".format(repr(value)))
return int(value)
elif type == "float":
try:
value = float(value)
except ValueError:
raise OSXDefaultsException("Invalid float value: {0}".format(repr(value)))
return value
elif type == "array":
if not isinstance(value, list):
raise OSXDefaultsException("Invalid value. Expected value to be an array")
return value
raise OSXDefaultsException('Type is not supported: {0}'.format(type))
""" Returns a normalized list of commandline arguments based on the "host" attribute """
def _host_args(self):
if self.host is None:
return []
elif self.host == 'currentHost':
return ['-currentHost']
else:
return ['-host', self.host]
""" Returns a list containing the "defaults" executable and any common base arguments """
def _base_command(self):
return [self.executable] + self._host_args()
""" Converts array output from defaults to an list """
@staticmethod
def _convert_defaults_str_to_list(value):
# Split output of defaults. Every line contains a value
value = value.splitlines()
# Remove first and last item, those are not actual values
value.pop(0)
value.pop(-1)
# Remove extra spaces and comma (,) at the end of values
value = [re.sub(',$', '', x.strip(' ')) for x in value]
return value
# /tools -------------------------------------------------------------- }}}
# commands ------------------------------------------------------------ {{{
""" Reads value of this domain & key from defaults """
def read(self):
# First try to find out the type
rc, out, err = self.module.run_command(self._base_command() + ["read-type", self.domain, self.key])
# If RC is 1, the key does not exists
if rc == 1:
return None
# If the RC is not 0, then terrible happened! Ooooh nooo!
if rc != 0:
raise OSXDefaultsException("An error occurred while reading key type from defaults: " + out)
# Ok, lets parse the type from output
type = out.strip().replace('Type is ', '')
# Now get the current value
rc, out, err = self.module.run_command(self._base_command() + ["read", self.domain, self.key])
# Strip output
out = out.strip()
# An non zero RC at this point is kinda strange...
if rc != 0:
raise OSXDefaultsException("An error occurred while reading key value from defaults: " + out)
# Convert string to list when type is array
if type == "array":
out = self._convert_defaults_str_to_list(out)
# Store the current_value
self.current_value = self._convert_type(type, out)
""" Writes value to this domain & key to defaults """
def write(self):
# We need to convert some values so the defaults commandline understands it
if isinstance(self.value, bool):
if self.value:
value = "TRUE"
else:
value = "FALSE"
elif isinstance(self.value, (int, float)):
value = str(self.value)
elif self.array_add and self.current_value is not None:
value = list(set(self.value) - set(self.current_value))
elif isinstance(self.value, datetime.datetime):
value = self.value.strftime('%Y-%m-%d %H:%M:%S')
else:
value = self.value
# When the type is array and array_add is enabled, morph the type :)
if self.type == "array" and self.array_add:
self.type = "array-add"
# All values should be a list, for easy passing it to the command
if not isinstance(value, list):
value = [value]
rc, out, err = self.module.run_command(self._base_command() + ['write', self.domain, self.key, '-' + self.type] + value)
if rc != 0:
raise OSXDefaultsException('An error occurred while writing value to defaults: ' + out)
""" Deletes defaults key from domain """
def delete(self):
rc, out, err = self.module.run_command(self._base_command() + ['delete', self.domain, self.key])
if rc != 0:
raise OSXDefaultsException("An error occurred while deleting key from defaults: " + out)
# /commands ----------------------------------------------------------- }}}
# run ----------------------------------------------------------------- {{{
""" Does the magic! :) """
def run(self):
# Get the current value from defaults
self.read()
# Handle absent state
if self.state == "absent":
if self.current_value is None:
return False
if self.module.check_mode:
return True
self.delete()
return True
# There is a type mismatch! Given type does not match the type in defaults
value_type = type(self.value)
if self.current_value is not None and not isinstance(self.current_value, value_type):
raise OSXDefaultsException("Type mismatch. Type in defaults: " + type(self.current_value).__name__)
        # Current value matches the given value. Nothing needs to be done. Arrays need extra care
if self.type == "array" and self.current_value is not None and not self.array_add and \
set(self.current_value) == set(self.value):
return False
elif self.type == "array" and self.current_value is not None and self.array_add and \
len(list(set(self.value) - set(self.current_value))) == 0:
return False
elif self.current_value == self.value:
return False
if self.module.check_mode:
return True
# Change/Create/Set given key/value for domain in defaults
self.write()
return True
# /run ---------------------------------------------------------------- }}}
# /class MacDefaults ------------------------------------------------------ }}}
# main -------------------------------------------------------------------- {{{
def main():
module = AnsibleModule(
argument_spec=dict(
domain=dict(
default="NSGlobalDomain",
required=False,
),
host=dict(
default=None,
required=False,
),
key=dict(
default=None,
),
type=dict(
default="string",
required=False,
choices=[
"array",
"bool",
"boolean",
"date",
"float",
"int",
"integer",
"string",
],
),
array_add=dict(
default=False,
required=False,
type='bool',
),
value=dict(
default=None,
required=False,
type='raw'
),
state=dict(
default="present",
required=False,
choices=[
"absent", "present"
],
),
path=dict(
default="/usr/bin:/usr/local/bin",
required=False,
)
),
supports_check_mode=True,
)
domain = module.params['domain']
host = module.params['host']
key = module.params['key']
type = module.params['type']
array_add = module.params['array_add']
value = module.params['value']
state = module.params['state']
path = module.params['path']
try:
defaults = OSXDefaults(module=module, domain=domain, host=host, key=key, type=type,
array_add=array_add, value=value, state=state, path=path)
changed = defaults.run()
module.exit_json(changed=changed)
except OSXDefaultsException:
e = get_exception()
module.fail_json(msg=e.message)
# /main ------------------------------------------------------------------- }}}
if __name__ == '__main__':
main()
| gpl-3.0 |
codeworldprodigy/lab4 | lib/jinja2/testsuite/filters.py | 394 | 19169 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.filters
~~~~~~~~~~~~~~~~~~~~~~~~
Tests for the jinja filters.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Markup, Environment
from jinja2._compat import text_type, implements_to_string
env = Environment()
class FilterTestCase(JinjaTestCase):
def test_filter_calling(self):
rv = env.call_filter('sum', [1, 2, 3])
self.assert_equal(rv, 6)
def test_capitalize(self):
tmpl = env.from_string('{{ "foo bar"|capitalize }}')
assert tmpl.render() == 'Foo bar'
def test_center(self):
tmpl = env.from_string('{{ "foo"|center(9) }}')
assert tmpl.render() == ' foo '
def test_default(self):
tmpl = env.from_string(
"{{ missing|default('no') }}|{{ false|default('no') }}|"
"{{ false|default('no', true) }}|{{ given|default('no') }}"
)
assert tmpl.render(given='yes') == 'no|False|no|yes'
def test_dictsort(self):
tmpl = env.from_string(
'{{ foo|dictsort }}|'
'{{ foo|dictsort(true) }}|'
'{{ foo|dictsort(false, "value") }}'
)
out = tmpl.render(foo={"aa": 0, "b": 1, "c": 2, "AB": 3})
assert out == ("[('aa', 0), ('AB', 3), ('b', 1), ('c', 2)]|"
"[('AB', 3), ('aa', 0), ('b', 1), ('c', 2)]|"
"[('aa', 0), ('b', 1), ('c', 2), ('AB', 3)]")
def test_batch(self):
tmpl = env.from_string("{{ foo|batch(3)|list }}|"
"{{ foo|batch(3, 'X')|list }}")
out = tmpl.render(foo=list(range(10)))
assert out == ("[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]|"
"[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 'X', 'X']]")
def test_slice(self):
tmpl = env.from_string('{{ foo|slice(3)|list }}|'
'{{ foo|slice(3, "X")|list }}')
out = tmpl.render(foo=list(range(10)))
assert out == ("[[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]|"
"[[0, 1, 2, 3], [4, 5, 6, 'X'], [7, 8, 9, 'X']]")
def test_escape(self):
tmpl = env.from_string('''{{ '<">&'|escape }}''')
out = tmpl.render()
assert out == '<">&'
def test_striptags(self):
tmpl = env.from_string('''{{ foo|striptags }}''')
out = tmpl.render(foo=' <p>just a small \n <a href="#">'
'example</a> link</p>\n<p>to a webpage</p> '
'<!-- <p>and some commented stuff</p> -->')
assert out == 'just a small example link to a webpage'
def test_filesizeformat(self):
tmpl = env.from_string(
'{{ 100|filesizeformat }}|'
'{{ 1000|filesizeformat }}|'
'{{ 1000000|filesizeformat }}|'
'{{ 1000000000|filesizeformat }}|'
'{{ 1000000000000|filesizeformat }}|'
'{{ 100|filesizeformat(true) }}|'
'{{ 1000|filesizeformat(true) }}|'
'{{ 1000000|filesizeformat(true) }}|'
'{{ 1000000000|filesizeformat(true) }}|'
'{{ 1000000000000|filesizeformat(true) }}'
)
out = tmpl.render()
self.assert_equal(out, (
'100 Bytes|1.0 kB|1.0 MB|1.0 GB|1.0 TB|100 Bytes|'
'1000 Bytes|976.6 KiB|953.7 MiB|931.3 GiB'
))
def test_filesizeformat_issue59(self):
tmpl = env.from_string(
'{{ 300|filesizeformat }}|'
'{{ 3000|filesizeformat }}|'
'{{ 3000000|filesizeformat }}|'
'{{ 3000000000|filesizeformat }}|'
'{{ 3000000000000|filesizeformat }}|'
'{{ 300|filesizeformat(true) }}|'
'{{ 3000|filesizeformat(true) }}|'
'{{ 3000000|filesizeformat(true) }}'
)
out = tmpl.render()
self.assert_equal(out, (
'300 Bytes|3.0 kB|3.0 MB|3.0 GB|3.0 TB|300 Bytes|'
'2.9 KiB|2.9 MiB'
))
def test_first(self):
tmpl = env.from_string('{{ foo|first }}')
out = tmpl.render(foo=list(range(10)))
assert out == '0'
def test_float(self):
tmpl = env.from_string('{{ "42"|float }}|'
'{{ "ajsghasjgd"|float }}|'
'{{ "32.32"|float }}')
out = tmpl.render()
assert out == '42.0|0.0|32.32'
def test_format(self):
tmpl = env.from_string('''{{ "%s|%s"|format("a", "b") }}''')
out = tmpl.render()
assert out == 'a|b'
def test_indent(self):
tmpl = env.from_string('{{ foo|indent(2) }}|{{ foo|indent(2, true) }}')
text = '\n'.join([' '.join(['foo', 'bar'] * 2)] * 2)
out = tmpl.render(foo=text)
assert out == ('foo bar foo bar\n foo bar foo bar| '
'foo bar foo bar\n foo bar foo bar')
def test_int(self):
tmpl = env.from_string('{{ "42"|int }}|{{ "ajsghasjgd"|int }}|'
'{{ "32.32"|int }}')
out = tmpl.render()
assert out == '42|0|32'
def test_join(self):
tmpl = env.from_string('{{ [1, 2, 3]|join("|") }}')
out = tmpl.render()
assert out == '1|2|3'
env2 = Environment(autoescape=True)
tmpl = env2.from_string('{{ ["<foo>", "<span>foo</span>"|safe]|join }}')
assert tmpl.render() == '<foo><span>foo</span>'
def test_join_attribute(self):
class User(object):
def __init__(self, username):
self.username = username
tmpl = env.from_string('''{{ users|join(', ', 'username') }}''')
assert tmpl.render(users=map(User, ['foo', 'bar'])) == 'foo, bar'
def test_last(self):
tmpl = env.from_string('''{{ foo|last }}''')
out = tmpl.render(foo=list(range(10)))
assert out == '9'
def test_length(self):
tmpl = env.from_string('''{{ "hello world"|length }}''')
out = tmpl.render()
assert out == '11'
def test_lower(self):
tmpl = env.from_string('''{{ "FOO"|lower }}''')
out = tmpl.render()
assert out == 'foo'
def test_pprint(self):
from pprint import pformat
tmpl = env.from_string('''{{ data|pprint }}''')
data = list(range(1000))
assert tmpl.render(data=data) == pformat(data)
def test_random(self):
tmpl = env.from_string('''{{ seq|random }}''')
seq = list(range(100))
for _ in range(10):
assert int(tmpl.render(seq=seq)) in seq
def test_reverse(self):
tmpl = env.from_string('{{ "foobar"|reverse|join }}|'
'{{ [1, 2, 3]|reverse|list }}')
assert tmpl.render() == 'raboof|[3, 2, 1]'
def test_string(self):
x = [1, 2, 3, 4, 5]
tmpl = env.from_string('''{{ obj|string }}''')
assert tmpl.render(obj=x) == text_type(x)
def test_title(self):
tmpl = env.from_string('''{{ "foo bar"|title }}''')
assert tmpl.render() == "Foo Bar"
tmpl = env.from_string('''{{ "foo's bar"|title }}''')
assert tmpl.render() == "Foo's Bar"
tmpl = env.from_string('''{{ "foo bar"|title }}''')
assert tmpl.render() == "Foo Bar"
tmpl = env.from_string('''{{ "f bar f"|title }}''')
assert tmpl.render() == "F Bar F"
tmpl = env.from_string('''{{ "foo-bar"|title }}''')
assert tmpl.render() == "Foo-Bar"
tmpl = env.from_string('''{{ "foo\tbar"|title }}''')
assert tmpl.render() == "Foo\tBar"
tmpl = env.from_string('''{{ "FOO\tBAR"|title }}''')
assert tmpl.render() == "Foo\tBar"
def test_truncate(self):
tmpl = env.from_string(
'{{ data|truncate(15, true, ">>>") }}|'
'{{ data|truncate(15, false, ">>>") }}|'
'{{ smalldata|truncate(15) }}'
)
out = tmpl.render(data='foobar baz bar' * 1000,
smalldata='foobar baz bar')
assert out == 'foobar baz barf>>>|foobar baz >>>|foobar baz bar'
def test_upper(self):
tmpl = env.from_string('{{ "foo"|upper }}')
assert tmpl.render() == 'FOO'
def test_urlize(self):
tmpl = env.from_string('{{ "foo http://www.example.com/ bar"|urlize }}')
assert tmpl.render() == 'foo <a href="http://www.example.com/">'\
'http://www.example.com/</a> bar'
def test_wordcount(self):
tmpl = env.from_string('{{ "foo bar baz"|wordcount }}')
assert tmpl.render() == '3'
def test_block(self):
tmpl = env.from_string('{% filter lower|escape %}<HEHE>{% endfilter %}')
assert tmpl.render() == '<hehe>'
def test_chaining(self):
tmpl = env.from_string('''{{ ['<foo>', '<bar>']|first|upper|escape }}''')
assert tmpl.render() == '<FOO>'
def test_sum(self):
tmpl = env.from_string('''{{ [1, 2, 3, 4, 5, 6]|sum }}''')
assert tmpl.render() == '21'
def test_sum_attributes(self):
tmpl = env.from_string('''{{ values|sum('value') }}''')
assert tmpl.render(values=[
{'value': 23},
{'value': 1},
{'value': 18},
]) == '42'
def test_sum_attributes_nested(self):
tmpl = env.from_string('''{{ values|sum('real.value') }}''')
assert tmpl.render(values=[
{'real': {'value': 23}},
{'real': {'value': 1}},
{'real': {'value': 18}},
]) == '42'
def test_sum_attributes_tuple(self):
tmpl = env.from_string('''{{ values.items()|sum('1') }}''')
assert tmpl.render(values={
'foo': 23,
'bar': 1,
'baz': 18,
}) == '42'
def test_abs(self):
tmpl = env.from_string('''{{ -1|abs }}|{{ 1|abs }}''')
assert tmpl.render() == '1|1', tmpl.render()
def test_round_positive(self):
tmpl = env.from_string('{{ 2.7|round }}|{{ 2.1|round }}|'
"{{ 2.1234|round(3, 'floor') }}|"
"{{ 2.1|round(0, 'ceil') }}")
assert tmpl.render() == '3.0|2.0|2.123|3.0', tmpl.render()
def test_round_negative(self):
tmpl = env.from_string('{{ 21.3|round(-1)}}|'
"{{ 21.3|round(-1, 'ceil')}}|"
"{{ 21.3|round(-1, 'floor')}}")
        assert tmpl.render() == '20.0|30.0|20.0', tmpl.render()
def test_xmlattr(self):
tmpl = env.from_string("{{ {'foo': 42, 'bar': 23, 'fish': none, "
"'spam': missing, 'blub:blub': '<?>'}|xmlattr }}")
out = tmpl.render().split()
assert len(out) == 3
assert 'foo="42"' in out
assert 'bar="23"' in out
assert 'blub:blub="<?>"' in out
def test_sort1(self):
tmpl = env.from_string('{{ [2, 3, 1]|sort }}|{{ [2, 3, 1]|sort(true) }}')
assert tmpl.render() == '[1, 2, 3]|[3, 2, 1]'
def test_sort2(self):
tmpl = env.from_string('{{ "".join(["c", "A", "b", "D"]|sort) }}')
assert tmpl.render() == 'AbcD'
def test_sort3(self):
tmpl = env.from_string('''{{ ['foo', 'Bar', 'blah']|sort }}''')
assert tmpl.render() == "['Bar', 'blah', 'foo']"
def test_sort4(self):
@implements_to_string
class Magic(object):
def __init__(self, value):
self.value = value
def __str__(self):
return text_type(self.value)
tmpl = env.from_string('''{{ items|sort(attribute='value')|join }}''')
assert tmpl.render(items=map(Magic, [3, 2, 4, 1])) == '1234'
def test_groupby(self):
tmpl = env.from_string('''
{%- for grouper, list in [{'foo': 1, 'bar': 2},
{'foo': 2, 'bar': 3},
{'foo': 1, 'bar': 1},
{'foo': 3, 'bar': 4}]|groupby('foo') -%}
{{ grouper }}{% for x in list %}: {{ x.foo }}, {{ x.bar }}{% endfor %}|
{%- endfor %}''')
assert tmpl.render().split('|') == [
"1: 1, 2: 1, 1",
"2: 2, 3",
"3: 3, 4",
""
]
def test_groupby_tuple_index(self):
tmpl = env.from_string('''
{%- for grouper, list in [('a', 1), ('a', 2), ('b', 1)]|groupby(0) -%}
{{ grouper }}{% for x in list %}:{{ x.1 }}{% endfor %}|
{%- endfor %}''')
assert tmpl.render() == 'a:1:2|b:1|'
def test_groupby_multidot(self):
class Date(object):
def __init__(self, day, month, year):
self.day = day
self.month = month
self.year = year
class Article(object):
def __init__(self, title, *date):
self.date = Date(*date)
self.title = title
articles = [
Article('aha', 1, 1, 1970),
Article('interesting', 2, 1, 1970),
Article('really?', 3, 1, 1970),
Article('totally not', 1, 1, 1971)
]
tmpl = env.from_string('''
{%- for year, list in articles|groupby('date.year') -%}
{{ year }}{% for x in list %}[{{ x.title }}]{% endfor %}|
{%- endfor %}''')
assert tmpl.render(articles=articles).split('|') == [
'1970[aha][interesting][really?]',
'1971[totally not]',
''
]
def test_filtertag(self):
tmpl = env.from_string("{% filter upper|replace('FOO', 'foo') %}"
"foobar{% endfilter %}")
assert tmpl.render() == 'fooBAR'
def test_replace(self):
env = Environment()
tmpl = env.from_string('{{ string|replace("o", 42) }}')
assert tmpl.render(string='<foo>') == '<f4242>'
env = Environment(autoescape=True)
tmpl = env.from_string('{{ string|replace("o", 42) }}')
assert tmpl.render(string='<foo>') == '<f4242>'
tmpl = env.from_string('{{ string|replace("<", 42) }}')
assert tmpl.render(string='<foo>') == '42foo>'
tmpl = env.from_string('{{ string|replace("o", ">x<") }}')
assert tmpl.render(string=Markup('foo')) == 'f>x<>x<'
def test_forceescape(self):
tmpl = env.from_string('{{ x|forceescape }}')
assert tmpl.render(x=Markup('<div />')) == u'<div />'
def test_safe(self):
env = Environment(autoescape=True)
tmpl = env.from_string('{{ "<div>foo</div>"|safe }}')
assert tmpl.render() == '<div>foo</div>'
tmpl = env.from_string('{{ "<div>foo</div>" }}')
assert tmpl.render() == '<div>foo</div>'
def test_urlencode(self):
env = Environment(autoescape=True)
tmpl = env.from_string('{{ "Hello, world!"|urlencode }}')
assert tmpl.render() == 'Hello%2C%20world%21'
tmpl = env.from_string('{{ o|urlencode }}')
assert tmpl.render(o=u"Hello, world\u203d") == "Hello%2C%20world%E2%80%BD"
assert tmpl.render(o=(("f", 1),)) == "f=1"
assert tmpl.render(o=(('f', 1), ("z", 2))) == "f=1&z=2"
assert tmpl.render(o=((u"\u203d", 1),)) == "%E2%80%BD=1"
assert tmpl.render(o={u"\u203d": 1}) == "%E2%80%BD=1"
assert tmpl.render(o={0: 1}) == "0=1"
def test_simple_map(self):
env = Environment()
tmpl = env.from_string('{{ ["1", "2", "3"]|map("int")|sum }}')
self.assertEqual(tmpl.render(), '6')
def test_attribute_map(self):
class User(object):
def __init__(self, name):
self.name = name
env = Environment()
users = [
User('john'),
User('jane'),
User('mike'),
]
tmpl = env.from_string('{{ users|map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'john|jane|mike')
def test_empty_map(self):
env = Environment()
tmpl = env.from_string('{{ none|map("upper")|list }}')
self.assertEqual(tmpl.render(), '[]')
def test_simple_select(self):
env = Environment()
tmpl = env.from_string('{{ [1, 2, 3, 4, 5]|select("odd")|join("|") }}')
self.assertEqual(tmpl.render(), '1|3|5')
def test_bool_select(self):
env = Environment()
tmpl = env.from_string('{{ [none, false, 0, 1, 2, 3, 4, 5]|select|join("|") }}')
self.assertEqual(tmpl.render(), '1|2|3|4|5')
def test_simple_reject(self):
env = Environment()
tmpl = env.from_string('{{ [1, 2, 3, 4, 5]|reject("odd")|join("|") }}')
self.assertEqual(tmpl.render(), '2|4')
def test_bool_reject(self):
env = Environment()
tmpl = env.from_string('{{ [none, false, 0, 1, 2, 3, 4, 5]|reject|join("|") }}')
self.assertEqual(tmpl.render(), 'None|False|0')
def test_simple_select_attr(self):
class User(object):
def __init__(self, name, is_active):
self.name = name
self.is_active = is_active
env = Environment()
users = [
User('john', True),
User('jane', True),
User('mike', False),
]
tmpl = env.from_string('{{ users|selectattr("is_active")|'
'map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'john|jane')
def test_simple_reject_attr(self):
class User(object):
def __init__(self, name, is_active):
self.name = name
self.is_active = is_active
env = Environment()
users = [
User('john', True),
User('jane', True),
User('mike', False),
]
tmpl = env.from_string('{{ users|rejectattr("is_active")|'
'map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'mike')
def test_func_select_attr(self):
class User(object):
def __init__(self, id, name):
self.id = id
self.name = name
env = Environment()
users = [
User(1, 'john'),
User(2, 'jane'),
User(3, 'mike'),
]
tmpl = env.from_string('{{ users|selectattr("id", "odd")|'
'map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'john|mike')
def test_func_reject_attr(self):
class User(object):
def __init__(self, id, name):
self.id = id
self.name = name
env = Environment()
users = [
User(1, 'john'),
User(2, 'jane'),
User(3, 'mike'),
]
tmpl = env.from_string('{{ users|rejectattr("id", "odd")|'
'map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'jane')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(FilterTestCase))
return suite
| apache-2.0 |
chriscrosscutler/scikit-image | skimage/data/tests/test_data.py | 21 | 1824 | import numpy as np
import skimage.data as data
from numpy.testing import assert_equal, assert_almost_equal
def test_lena():
""" Test that "Lena" image can be loaded. """
lena = data.lena()
assert_equal(lena.shape, (512, 512, 3))
def test_astronaut():
""" Test that "astronaut" image can be loaded. """
astronaut = data.astronaut()
assert_equal(astronaut.shape, (512, 512, 3))
def test_camera():
""" Test that "camera" image can be loaded. """
cameraman = data.camera()
assert_equal(cameraman.ndim, 2)
def test_checkerboard():
""" Test that "checkerboard" image can be loaded. """
data.checkerboard()
def test_text():
""" Test that "text" image can be loaded. """
data.text()
def test_moon():
""" Test that "moon" image can be loaded. """
data.moon()
def test_page():
""" Test that "page" image can be loaded. """
data.page()
def test_clock():
""" Test that "clock" image can be loaded. """
data.clock()
def test_chelsea():
""" Test that "chelsea" image can be loaded. """
data.chelsea()
def test_coffee():
""" Test that "coffee" image can be loaded. """
data.coffee()
def test_binary_blobs():
blobs = data.binary_blobs(length=128)
assert_almost_equal(blobs.mean(), 0.5, decimal=1)
blobs = data.binary_blobs(length=128, volume_fraction=0.25)
assert_almost_equal(blobs.mean(), 0.25, decimal=1)
blobs = data.binary_blobs(length=32, volume_fraction=0.25, n_dim=3)
assert_almost_equal(blobs.mean(), 0.25, decimal=1)
other_realization = data.binary_blobs(length=32, volume_fraction=0.25,
n_dim=3)
assert not np.all(blobs == other_realization)
if __name__ == "__main__":
from numpy.testing import run_module_suite
run_module_suite()
| bsd-3-clause |
rajul/mne-python | mne/time_frequency/stft.py | 24 | 6497 | from math import ceil
import numpy as np
from scipy.fftpack import fft, ifft, fftfreq
from ..utils import logger, verbose
@verbose
def stft(x, wsize, tstep=None, verbose=None):
"""STFT Short-Term Fourier Transform using a sine window.
The transformation is designed to be a tight frame that can be
perfectly inverted. It only returns the positive frequencies.
Parameters
----------
x : 2d array of size n_signals x T
containing multi-channels signal
wsize : int
length of the STFT window in samples (must be a multiple of 4)
tstep : int
step between successive windows in samples (must be a multiple of 2,
a divider of wsize and smaller than wsize/2) (default: wsize/2)
verbose : bool, str, int, or None
If not None, override default verbose level (see mne.verbose).
Returns
-------
X : 3d array of shape [n_signals, wsize / 2 + 1, n_step]
STFT coefficients for positive frequencies with
n_step = ceil(T / tstep)
Examples
--------
X = stft(x, wsize)
X = stft(x, wsize, tstep)
See Also
--------
istft
stftfreq
"""
if not np.isrealobj(x):
raise ValueError("x is not a real valued array")
if x.ndim == 1:
x = x[None, :]
n_signals, T = x.shape
wsize = int(wsize)
# Errors and warnings
if wsize % 4:
raise ValueError('The window length must be a multiple of 4.')
if tstep is None:
tstep = wsize / 2
tstep = int(tstep)
if (wsize % tstep) or (tstep % 2):
raise ValueError('The step size must be a multiple of 2 and a '
'divider of the window length.')
if tstep > wsize / 2:
raise ValueError('The step size must be smaller than half the '
'window length.')
n_step = int(ceil(T / float(tstep)))
n_freq = wsize // 2 + 1
logger.info("Number of frequencies: %d" % n_freq)
logger.info("Number of time steps: %d" % n_step)
X = np.zeros((n_signals, n_freq, n_step), dtype=np.complex)
if n_signals == 0:
return X
# Defining sine window
win = np.sin(np.arange(.5, wsize + .5) / wsize * np.pi)
win2 = win ** 2
swin = np.zeros((n_step - 1) * tstep + wsize)
for t in range(n_step):
swin[t * tstep:t * tstep + wsize] += win2
swin = np.sqrt(wsize * swin)
# Zero-padding and Pre-processing for edges
xp = np.zeros((n_signals, wsize + (n_step - 1) * tstep),
dtype=x.dtype)
xp[:, (wsize - tstep) // 2: (wsize - tstep) // 2 + T] = x
x = xp
for t in range(n_step):
# Framing
wwin = win / swin[t * tstep: t * tstep + wsize]
frame = x[:, t * tstep: t * tstep + wsize] * wwin[None, :]
# FFT
fframe = fft(frame)
X[:, :, t] = fframe[:, :n_freq]
return X
def istft(X, tstep=None, Tx=None):
"""ISTFT Inverse Short-Term Fourier Transform using a sine window
Parameters
----------
X : 3d array of shape [n_signals, wsize / 2 + 1, n_step]
The STFT coefficients for positive frequencies
tstep : int
step between successive windows in samples (must be a multiple of 2,
a divider of wsize and smaller than wsize/2) (default: wsize/2)
Tx : int
Length of returned signal. If None Tx = n_step * tstep
Returns
-------
x : 1d array of length Tx
vector containing the inverse STFT signal
Examples
--------
x = istft(X)
x = istft(X, tstep)
See Also
--------
stft
"""
# Errors and warnings
n_signals, n_win, n_step = X.shape
    if n_win % 2 == 0:
        raise ValueError('The number of rows of the STFT matrix must be odd.')
wsize = 2 * (n_win - 1)
if tstep is None:
tstep = wsize / 2
if wsize % tstep:
raise ValueError('The step size must be a divider of two times the '
'number of rows of the STFT matrix minus two.')
    if tstep % 2:
raise ValueError('The step size must be a multiple of 2.')
if tstep > wsize / 2:
raise ValueError('The step size must be smaller than the number of '
'rows of the STFT matrix minus one.')
if Tx is None:
Tx = n_step * tstep
T = n_step * tstep
x = np.zeros((n_signals, T + wsize - tstep), dtype=np.float)
if n_signals == 0:
return x[:, :Tx]
# Defining sine window
win = np.sin(np.arange(.5, wsize + .5) / wsize * np.pi)
# win = win / norm(win);
# Pre-processing for edges
swin = np.zeros(T + wsize - tstep, dtype=np.float)
for t in range(n_step):
swin[t * tstep:t * tstep + wsize] += win ** 2
swin = np.sqrt(swin / wsize)
fframe = np.empty((n_signals, n_win + wsize // 2 - 1), dtype=X.dtype)
for t in range(n_step):
# IFFT
fframe[:, :n_win] = X[:, :, t]
fframe[:, n_win:] = np.conj(X[:, wsize // 2 - 1: 0: -1, t])
frame = ifft(fframe)
wwin = win / swin[t * tstep:t * tstep + wsize]
# Overlap-add
x[:, t * tstep: t * tstep + wsize] += np.real(np.conj(frame) * wwin)
# Truncation
x = x[:, (wsize - tstep) // 2: (wsize - tstep) // 2 + T + 1][:, :Tx].copy()
return x
def stftfreq(wsize, sfreq=None):
"""Frequencies of stft transformation
Parameters
----------
wsize : int
Size of stft window
sfreq : float
Sampling frequency. If None the frequencies are given between 0 and pi
otherwise it's given in Hz.
Returns
-------
freqs : array
The positive frequencies returned by stft
See Also
--------
stft
istft
"""
n_freq = wsize // 2 + 1
freqs = fftfreq(wsize)
freqs = np.abs(freqs[:n_freq])
if sfreq is not None:
freqs *= float(sfreq)
return freqs
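# Example (a sketch): stftfreq(128, sfreq=1000.) returns 65 frequencies
# spanning 0 Hz up to the Nyquist frequency, 500 Hz.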
def stft_norm2(X):
"""Compute L2 norm of STFT transform
    It takes into account that stft only returns positive frequencies.
    As we use a tight frame, this quantity is conserved by the stft.
Parameters
----------
X : 3D complex array
The STFT transforms
Returns
-------
norms2 : array
The squared L2 norm of every row of X.
"""
X2 = (X * X.conj()).real
# compute all L2 coefs and remove first and last frequency once.
norms2 = (2. * X2.sum(axis=2).sum(axis=1) - np.sum(X2[:, 0, :], axis=1) -
np.sum(X2[:, -1, :], axis=1))
return norms2
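# Example round-trip (a sketch; values are illustrative). Because the sine
# window forms a tight frame, istft() inverts stft() up to numerical error:
#
#   >>> import numpy as np
#   >>> x = np.random.randn(2, 1000)
#   >>> X = stft(x, wsize=128)
#   >>> np.allclose(istft(X, Tx=x.shape[1]), x)
#   True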
| bsd-3-clause |
acosinwork/Arduino | arduino-core/src/processing/app/i18n/python/update.py | 134 | 1464 | #!/usr/bin/env python
#vim:set fileencoding=utf-8 sw=2 expandtab
def unquote(s):
s = s.strip()
if s[0] != '"' or s[-1] != '"':
raise RuntimeError
return s[1:-1]
def read_po(fp):
if isinstance(fp, str):
fp = open(fp)
d = {}
st = 1
comment = key = rkey = rvalue = ''
for line in fp:
if line[0] == '#' or line.strip() == '':
if st == 2:
d[key] = (comment, rkey, rvalue)
st = 1
comment = key = rkey = rvalue = ''
comment += line
elif line[0] == '"':
if st == 1:
key += unquote(line)
rkey += line
else:
rvalue += line
elif line.startswith('msgid '):
st = 1
key = unquote(line[6:])
rkey = line
elif line.startswith('msgstr '):
st = 2
rvalue = line
else:
      raise RuntimeError('unexpected line in PO file: %r' % line)
if st == 2:
d[key] = (comment, rkey, rvalue)
return d
def dump(d, dstFile):
out = open(dstFile, 'w')
# The first block in file should go first because the key is ''.
for key in sorted(d.keys()):
(comment, rkey, rvalue) = d[key]
out.write(comment)
out.write(rkey)
out.write(rvalue)
out.close()
def merge(d, dd):
for key in dd.keys():
    if key in d:
d[key] = dd[key]
return d
# Remove currently unused catalog text lines from ".po" file.
def main():
import sys
d = read_po(sys.stdin)
dd = read_po(sys.argv[1])
dump(merge(d, dd), sys.argv[1])
if __name__ == '__main__':
main()
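# Example invocation (a sketch; file names are illustrative): read the new
# catalog from stdin and merge it into an existing ".po" file in place:
#
#   python update.py Resources_de.po < Resources_new.po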
| lgpl-2.1 |
mrworf/multiremote | drivers/base.py | 1 | 9963 | # This file is part of multiRemote.
#
# multiRemote is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# multiRemote is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with multiRemote. If not, see <http://www.gnu.org/licenses/>.
#
"""
Simplest driver of all. It provides logic for power handling and commands,
and simplifies some of the more nitty-gritty work that all drivers must do.
It's HIGHLY RECOMMENDED that drivers utilize this class as the base class,
since it provides quite a bit of abstraction and easier power management.
"""
from modules.commandtype import CommandType
import traceback
import logging
import socket
import requests
from xml.etree import ElementTree
from dataclasses import field,make_dataclass
class driverBase:
def __init__(self, *args):
self.power = False
self.COMMAND_HANDLER = {}
self.httpTimeout = 250 # 250ms
self.handlers = []
self.eventManager = None
# Invoke the real init function
self.init(*args)
def setEventManager(self, eventManager):
self.eventManager = eventManager
def init(self):
""" Override to do additional initialization
"""
pass
def eventOn(self):
""" Override to handle power on event
"""
logging.warning("" + repr(self) + " is not implementing power on")
def eventOff(self):
""" Override to handle power off event
"""
logging.warning("" + repr(self) + " is not implementing power off")
def eventExtras(self, keyvalue):
""" Override this to handle extra data
"""
pass
def isAsync(self):
''' Override this to change async behavior, default is True
Async means that multiple instances of this driver can be
used in parallel.
'''
return True
############################################################################
## Functions below provide convenience for the new driver.
############################################################################
def _handleResponse(self, r, contentIsJSON=False, contentIsXML=False):
result = {
'success' : False,
'code': 501,
'content' : None
}
try:
if contentIsJSON:
content = r.json
elif contentIsXML:
content = ElementTree.fromstring(r.content)
else:
content = r.content
result = {
'success' : r.status_code == requests.codes.ok,
'code': r.status_code,
'content' : content
}
except:
logging.exception('Failed to parse result')
return result
def httpGet(self, url, contentIsJSON=False, contentIsXML=False):
result = {
'success' : False,
'code': 500,
'content' : None
}
try:
r = requests.get(url, timeout=self.httpTimeout/1000.0)
result = self._handleResponse(r, contentIsXML=contentIsXML, contentIsJSON=contentIsJSON)
except:
logging.exception('HTTP GET failed')
return result
def httpPost(self, url, data = None, contentIsJSON=False, contentIsXML=False):
result = {
'success' : False,
'code': 500,
'content' : None
}
try:
r = requests.post(url, data=data, timeout=self.httpTimeout/1000.0)
      result = self._handleResponse(r, contentIsXML=contentIsXML, contentIsJSON=contentIsJSON)
except:
logging.exception('HTTP POST failed')
return result
def FQDN2IP(self, fqdn, getIPV6 = False):
""" Takes a regular DNS name and resolves it into an IP address instead.
If you provide an IP address, it will simply return the IP address.
"""
try:
family = socket.AF_INET
if getIPV6:
family = socket.AF_INET6
details = socket.getaddrinfo(fqdn, 80, family, socket.SOCK_STREAM)
if details is None or len(details) < 1:
logging.error('Unable to resolve "%s" to a network address', fqdn)
elif len(details) > 1:
logging.warning('"%s" returned %d results, only using the first entry', fqdn, len(details))
return details[0][4][0]
except:
logging.exception('Unable to resolve "%s"', fqdn)
return None
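  # Example (a sketch): FQDN2IP('localhost') typically resolves to
  # '127.0.0.1', and passing an IP literal such as '192.168.1.10' simply
  # returns it unchanged.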
def registerHandler(self, handler, cmds):
""" API: Registers a handler to be called for cmds defined in list
Does not have unregister since this should not change during its lifetime
"""
self.handlers.append({'handler':handler, 'commands': cmds})
def addCommand(self, command, cmdtype, handler, name = None, desc = None, extras = None, args = 0):
""" Convenience function, allows adding commands to the list which
is exposed by getCommands() and handleCommand()
"""
    if name is None:
      name = command
    if desc is None:
      desc = name
    if extras is None:
self.COMMAND_HANDLER[command] = {
"arguments" : args,
"handler" : handler,
"name" : name,
"description" : desc,
"type" : cmdtype
}
else:
self.COMMAND_HANDLER[command] = {
"arguments" : args,
"handler" : handler,
"name" : name,
"description" : desc,
"type" : cmdtype,
"extras" : extras
}
############################################################################
## Functions below are usually not overriden since they provide basic
## housekeeping. It's better to override eventXXX() functions above.
############################################################################
def setPower(self, enable):
""" API: Changes the power state of the device, if the state already
is at the requested value, then nothing happens.
"""
if self.power == enable:
return True
self.power = enable
try:
if enable:
self.eventOn()
else:
self.eventOff()
except:
logging.exception("Exception when calling setPower(%s)" % repr(enable))
return True
def applyExtras(self, keyvaluepairs):
""" API: Called when this device is selected as a scene, can be called more
than once during a powered session, since user may switch between
different scenes which all use the same driver but different extras.
By default, this parses a string that looks like this:
key=value,key=value,...
        and calls eventExtras() with a dict. Drivers can override this method
        directly if needed, but overriding eventExtras() is the recommended
        approach.
"""
result = {}
pairs = keyvaluepairs.split(",")
for pair in pairs:
parts = pair.split("=", 1)
if len(parts) == 2:
result[parts[0].strip()] = parts[1].strip()
if len(result) > 0:
self.eventExtras(result)
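  # Example (a sketch): applyExtras('input=hdmi1, volume=25') calls
  # eventExtras({'input': 'hdmi1', 'volume': '25'}); the keys shown are
  # illustrative, not keys this base class defines.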
def handleCommand(self, zone, command, argument):
""" API: Called by the server whenever a command needs to be executed,
        the only exception being power commands, which are ALWAYS called
through the setPower() function.
-- FUTURE: --
Eventually it will do low-level handling of state, what that
means is that certain command types will be grouped and multiple
calls to the same command will only execute the first one.
For example, calling input-hdmi1 three times will only execute
the first time. This is to avoid unnecessary latencies.
A driver will be able to override this behavior by adding a flag
to the command definition.
"""
'''
result = None
for handler in self.handlers:
if command in handler['commands']:
try:
result = handler['handler'](zone, command, argument)
except:
logging.exception("Exception executing command %s for zone %s" % (repr(command), repr(zone)))
break
return result
'''
result = None
if command not in self.COMMAND_HANDLER:
logging.error("%s is not a supported command" % command)
return result
try:
item = self.COMMAND_HANDLER[command]
if item["arguments"] == 0:
if "extras" in item:
result = item["handler"](zone, item["extras"])
else:
result = item["handler"](zone)
elif item["arguments"] == 1:
if "extras" in item:
result = item["handler"](zone, argument[0], item["extras"])
else:
result = item["handler"](zone, argument[0])
return result
except:
logging.exception("Exception executing command %s for zone %s" % (repr(command), repr(zone)))
return None
def getCommands(self):
""" API: Returns the list of supported commands. For now it also limits this
list depending on the type. This is less than ideal, but for now
this is how it's done.
"""
ret = {}
'''
for handler in self.handlers:
for cmd in handler['commands']:
ret
'''
for c in self.COMMAND_HANDLER:
# Do not expose certain commands
if self.COMMAND_HANDLER[c]["type"] > CommandType.LIMIT_GETCOMMANDS:
continue
ret[c] = {"name": "", "description": ""}
if "name" in self.COMMAND_HANDLER[c]:
ret[c]["name"] = self.COMMAND_HANDLER[c]["name"]
if "description" in self.COMMAND_HANDLER[c]:
ret[c]["description"] = self.COMMAND_HANDLER[c]["description"]
ret[c]["type"] = self.COMMAND_HANDLER[c]["type"]
return ret
def sendEvent(self, eventType, eventSource, eventData, zone=None):
# self.events.notify(None, {"type":"zone", "source" : remote, "data": {"zone" : zone, "inuse" : True}})
self.eventManager.notify(zone, {"type":eventType, "source":eventSource, "data":eventData})
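# Minimal driver sketch (illustrative only; the command name and the
# CommandType member used here are assumptions, not verified against
# modules.commandtype):
#
#   class driverNull(driverBase):
#     def init(self):
#       self.addCommand("play", CommandType.PLAYBACK_PLAY, self.playHandler)
#
#     def playHandler(self, zone):
#       logging.info("play requested in zone %s", zone)
#
#     def eventOn(self):
#       logging.info("power on")
#
#     def eventOff(self):
#       logging.info("power off")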
| gpl-2.0 |
reinis-martinsons/gyb-frp | pyasn1/type/univ.py | 86 | 44623 | # ASN.1 "universal" data types
import operator, sys, math
from pyasn1.type import base, tag, constraint, namedtype, namedval, tagmap
from pyasn1.codec.ber import eoo
from pyasn1.compat import octets
from pyasn1 import error
# "Simple" ASN.1 types (yet incomplete)
class Integer(base.AbstractSimpleAsn1Item):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x02)
)
namedValues = namedval.NamedValues()
def __init__(self, value=None, tagSet=None, subtypeSpec=None,
namedValues=None):
if namedValues is None:
self.__namedValues = self.namedValues
else:
self.__namedValues = namedValues
base.AbstractSimpleAsn1Item.__init__(
self, value, tagSet, subtypeSpec
)
def __repr__(self):
if self.__namedValues is not self.namedValues:
return '%s, %r)' % (base.AbstractSimpleAsn1Item.__repr__(self)[:-1], self.__namedValues)
else:
return base.AbstractSimpleAsn1Item.__repr__(self)
def __and__(self, value): return self.clone(self._value & value)
def __rand__(self, value): return self.clone(value & self._value)
def __or__(self, value): return self.clone(self._value | value)
def __ror__(self, value): return self.clone(value | self._value)
def __xor__(self, value): return self.clone(self._value ^ value)
def __rxor__(self, value): return self.clone(value ^ self._value)
def __lshift__(self, value): return self.clone(self._value << value)
def __rshift__(self, value): return self.clone(self._value >> value)
def __add__(self, value): return self.clone(self._value + value)
def __radd__(self, value): return self.clone(value + self._value)
def __sub__(self, value): return self.clone(self._value - value)
def __rsub__(self, value): return self.clone(value - self._value)
def __mul__(self, value): return self.clone(self._value * value)
def __rmul__(self, value): return self.clone(value * self._value)
def __mod__(self, value): return self.clone(self._value % value)
def __rmod__(self, value): return self.clone(value % self._value)
def __pow__(self, value, modulo=None): return self.clone(pow(self._value, value, modulo))
def __rpow__(self, value): return self.clone(pow(value, self._value))
if sys.version_info[0] <= 2:
def __div__(self, value): return self.clone(self._value // value)
def __rdiv__(self, value): return self.clone(value // self._value)
else:
def __truediv__(self, value): return self.clone(self._value / value)
def __rtruediv__(self, value): return self.clone(value / self._value)
def __divmod__(self, value): return self.clone(self._value // value)
def __rdivmod__(self, value): return self.clone(value // self._value)
__hash__ = base.AbstractSimpleAsn1Item.__hash__
def __int__(self): return int(self._value)
if sys.version_info[0] <= 2:
def __long__(self): return long(self._value)
def __float__(self): return float(self._value)
def __abs__(self): return self.clone(abs(self._value))
def __index__(self): return int(self._value)
def __pos__(self): return self.clone(+self._value)
def __neg__(self): return self.clone(-self._value)
def __invert__(self): return self.clone(~self._value)
def __round__(self, n=0):
r = round(self._value, n)
if n:
return self.clone(r)
else:
return r
def __floor__(self): return math.floor(self._value)
def __ceil__(self): return math.ceil(self._value)
if sys.version_info[0:2] > (2, 5):
def __trunc__(self): return self.clone(math.trunc(self._value))
def __lt__(self, value): return self._value < value
def __le__(self, value): return self._value <= value
def __eq__(self, value): return self._value == value
def __ne__(self, value): return self._value != value
def __gt__(self, value): return self._value > value
def __ge__(self, value): return self._value >= value
def prettyIn(self, value):
if not isinstance(value, str):
try:
return int(value)
except:
raise error.PyAsn1Error(
'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1])
)
r = self.__namedValues.getValue(value)
if r is not None:
return r
try:
return int(value)
except:
raise error.PyAsn1Error(
'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1])
)
def prettyOut(self, value):
r = self.__namedValues.getName(value)
return r is None and str(value) or repr(r)
def getNamedValues(self): return self.__namedValues
def clone(self, value=None, tagSet=None, subtypeSpec=None,
namedValues=None):
if value is None and tagSet is None and subtypeSpec is None \
and namedValues is None:
return self
if value is None:
value = self._value
if tagSet is None:
tagSet = self._tagSet
if subtypeSpec is None:
subtypeSpec = self._subtypeSpec
if namedValues is None:
namedValues = self.__namedValues
return self.__class__(value, tagSet, subtypeSpec, namedValues)
def subtype(self, value=None, implicitTag=None, explicitTag=None,
subtypeSpec=None, namedValues=None):
if value is None:
value = self._value
if implicitTag is not None:
tagSet = self._tagSet.tagImplicitly(implicitTag)
elif explicitTag is not None:
tagSet = self._tagSet.tagExplicitly(explicitTag)
else:
tagSet = self._tagSet
if subtypeSpec is None:
subtypeSpec = self._subtypeSpec
else:
subtypeSpec = subtypeSpec + self._subtypeSpec
if namedValues is None:
namedValues = self.__namedValues
else:
namedValues = namedValues + self.__namedValues
return self.__class__(value, tagSet, subtypeSpec, namedValues)
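# Example (a sketch): Integer supports ordinary arithmetic and returns new
# instances of the same ASN.1 type, e.g. Integer(12) + 2 == Integer(14).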
class Boolean(Integer):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x01),
)
subtypeSpec = Integer.subtypeSpec+constraint.SingleValueConstraint(0,1)
namedValues = Integer.namedValues.clone(('False', 0), ('True', 1))
class BitString(base.AbstractSimpleAsn1Item):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x03)
)
namedValues = namedval.NamedValues()
def __init__(self, value=None, tagSet=None, subtypeSpec=None,
namedValues=None):
if namedValues is None:
self.__namedValues = self.namedValues
else:
self.__namedValues = namedValues
base.AbstractSimpleAsn1Item.__init__(
self, value, tagSet, subtypeSpec
)
def clone(self, value=None, tagSet=None, subtypeSpec=None,
namedValues=None):
if value is None and tagSet is None and subtypeSpec is None \
and namedValues is None:
return self
if value is None:
value = self._value
if tagSet is None:
tagSet = self._tagSet
if subtypeSpec is None:
subtypeSpec = self._subtypeSpec
if namedValues is None:
namedValues = self.__namedValues
return self.__class__(value, tagSet, subtypeSpec, namedValues)
def subtype(self, value=None, implicitTag=None, explicitTag=None,
subtypeSpec=None, namedValues=None):
if value is None:
value = self._value
if implicitTag is not None:
tagSet = self._tagSet.tagImplicitly(implicitTag)
elif explicitTag is not None:
tagSet = self._tagSet.tagExplicitly(explicitTag)
else:
tagSet = self._tagSet
if subtypeSpec is None:
subtypeSpec = self._subtypeSpec
else:
subtypeSpec = subtypeSpec + self._subtypeSpec
if namedValues is None:
namedValues = self.__namedValues
else:
namedValues = namedValues + self.__namedValues
return self.__class__(value, tagSet, subtypeSpec, namedValues)
def __str__(self): return str(tuple(self))
# Immutable sequence object protocol
def __len__(self):
if self._len is None:
self._len = len(self._value)
return self._len
def __getitem__(self, i):
if isinstance(i, slice):
return self.clone(operator.getitem(self._value, i))
else:
return self._value[i]
def __add__(self, value): return self.clone(self._value + value)
def __radd__(self, value): return self.clone(value + self._value)
def __mul__(self, value): return self.clone(self._value * value)
def __rmul__(self, value): return self * value
def prettyIn(self, value):
r = []
if not value:
return ()
elif isinstance(value, str):
if value[0] == '\'':
if value[-2:] == '\'B':
for v in value[1:-2]:
if v == '0':
r.append(0)
elif v == '1':
r.append(1)
else:
raise error.PyAsn1Error(
'Non-binary BIT STRING initializer %s' % (v,)
)
return tuple(r)
elif value[-2:] == '\'H':
for v in value[1:-2]:
i = 4
v = int(v, 16)
while i:
i = i - 1
r.append((v>>i)&0x01)
return tuple(r)
else:
raise error.PyAsn1Error(
'Bad BIT STRING value notation %s' % (value,)
)
else:
for i in value.split(','):
j = self.__namedValues.getValue(i)
if j is None:
raise error.PyAsn1Error(
'Unknown bit identifier \'%s\'' % (i,)
)
if j >= len(r):
r.extend([0]*(j-len(r)+1))
r[j] = 1
return tuple(r)
elif isinstance(value, (tuple, list)):
r = tuple(value)
for b in r:
if b and b != 1:
raise error.PyAsn1Error(
'Non-binary BitString initializer \'%s\'' % (r,)
)
return r
elif isinstance(value, BitString):
return tuple(value)
else:
raise error.PyAsn1Error(
'Bad BitString initializer type \'%s\'' % (value,)
)
def prettyOut(self, value):
return '\"\'%s\'B\"' % ''.join([str(x) for x in value])
try:
all
except NameError: # Python 2.4
def all(iterable):
for element in iterable:
if not element:
return False
return True
class OctetString(base.AbstractSimpleAsn1Item):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04)
)
defaultBinValue = defaultHexValue = base.noValue
encoding = 'us-ascii'
def __init__(self, value=None, tagSet=None, subtypeSpec=None,
encoding=None, binValue=None, hexValue=None):
if encoding is None:
self._encoding = self.encoding
else:
self._encoding = encoding
if binValue is not None:
value = self.fromBinaryString(binValue)
if hexValue is not None:
value = self.fromHexString(hexValue)
if value is None or value is base.noValue:
value = self.defaultHexValue
if value is None or value is base.noValue:
value = self.defaultBinValue
self.__asNumbersCache = None
base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec)
def clone(self, value=None, tagSet=None, subtypeSpec=None,
encoding=None, binValue=None, hexValue=None):
if value is None and tagSet is None and subtypeSpec is None and \
encoding is None and binValue is None and hexValue is None:
return self
if value is None and binValue is None and hexValue is None:
value = self._value
if tagSet is None:
tagSet = self._tagSet
if subtypeSpec is None:
subtypeSpec = self._subtypeSpec
if encoding is None:
encoding = self._encoding
return self.__class__(
value, tagSet, subtypeSpec, encoding, binValue, hexValue
)
if sys.version_info[0] <= 2:
def prettyIn(self, value):
if isinstance(value, str):
return value
elif isinstance(value, unicode):
try:
return value.encode(self._encoding)
except (LookupError, UnicodeEncodeError):
raise error.PyAsn1Error(
'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
)
elif isinstance(value, (tuple, list)):
try:
return ''.join([ chr(x) for x in value ])
except ValueError:
raise error.PyAsn1Error(
'Bad OctetString initializer \'%s\'' % (value,)
)
else:
return str(value)
else:
def prettyIn(self, value):
if isinstance(value, bytes):
return value
elif isinstance(value, str):
try:
return value.encode(self._encoding)
except UnicodeEncodeError:
raise error.PyAsn1Error(
'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
)
elif isinstance(value, OctetString):
return value.asOctets()
elif isinstance(value, (tuple, list, map)):
try:
return bytes(value)
except ValueError:
raise error.PyAsn1Error(
'Bad OctetString initializer \'%s\'' % (value,)
)
else:
try:
return str(value).encode(self._encoding)
except UnicodeEncodeError:
raise error.PyAsn1Error(
'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
)
def fromBinaryString(self, value):
bitNo = 8; byte = 0; r = ()
for v in value:
if bitNo:
bitNo = bitNo - 1
else:
bitNo = 7
r = r + (byte,)
byte = 0
if v == '0':
v = 0
elif v == '1':
v = 1
else:
raise error.PyAsn1Error(
'Non-binary OCTET STRING initializer %s' % (v,)
)
byte = byte | (v << bitNo)
return octets.ints2octs(r + (byte,))
def fromHexString(self, value):
r = p = ()
for v in value:
if p:
r = r + (int(p+v, 16),)
p = ()
else:
p = v
if p:
r = r + (int(p+'0', 16),)
return octets.ints2octs(r)
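    # Example (a sketch): OctetString(hexValue='deadbeef').asNumbers()
    # returns (222, 173, 190, 239).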
def prettyOut(self, value):
if sys.version_info[0] <= 2:
numbers = tuple(( ord(x) for x in value ))
else:
numbers = tuple(value)
if all(x >= 32 and x <= 126 for x in numbers):
return str(value)
else:
return '0x' + ''.join(( '%.2x' % x for x in numbers ))
def __repr__(self):
r = []
doHex = False
if self._value is not self.defaultValue:
for x in self.asNumbers():
if x < 32 or x > 126:
doHex = True
break
if not doHex:
r.append('%r' % (self._value,))
if self._tagSet is not self.tagSet:
r.append('tagSet=%r' % (self._tagSet,))
if self._subtypeSpec is not self.subtypeSpec:
r.append('subtypeSpec=%r' % (self._subtypeSpec,))
if self.encoding is not self._encoding:
r.append('encoding=%r' % (self._encoding,))
if doHex:
r.append('hexValue=%r' % ''.join([ '%.2x' % x for x in self.asNumbers() ]))
return '%s(%s)' % (self.__class__.__name__, ', '.join(r))
if sys.version_info[0] <= 2:
def __str__(self): return str(self._value)
def __unicode__(self):
return self._value.decode(self._encoding, 'ignore')
def asOctets(self): return self._value
def asNumbers(self):
if self.__asNumbersCache is None:
self.__asNumbersCache = tuple([ ord(x) for x in self._value ])
return self.__asNumbersCache
else:
def __str__(self): return self._value.decode(self._encoding, 'ignore')
def __bytes__(self): return self._value
def asOctets(self): return self._value
def asNumbers(self):
if self.__asNumbersCache is None:
self.__asNumbersCache = tuple(self._value)
return self.__asNumbersCache
# Immutable sequence object protocol
def __len__(self):
if self._len is None:
self._len = len(self._value)
return self._len
def __getitem__(self, i):
if isinstance(i, slice):
return self.clone(operator.getitem(self._value, i))
else:
return self._value[i]
def __add__(self, value): return self.clone(self._value + self.prettyIn(value))
def __radd__(self, value): return self.clone(self.prettyIn(value) + self._value)
def __mul__(self, value): return self.clone(self._value * value)
def __rmul__(self, value): return self * value
def __int__(self): return int(self._value)
def __float__(self): return float(self._value)
class Null(OctetString):
defaultValue = ''.encode() # This is tightly constrained
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x05)
)
subtypeSpec = OctetString.subtypeSpec+constraint.SingleValueConstraint(''.encode())
if sys.version_info[0] <= 2:
intTypes = (int, long)
else:
intTypes = (int,)
numericTypes = intTypes + (float,)
class ObjectIdentifier(base.AbstractSimpleAsn1Item):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x06)
)
def __add__(self, other): return self.clone(self._value + other)
def __radd__(self, other): return self.clone(other + self._value)
def asTuple(self): return self._value
# Sequence object protocol
def __len__(self):
if self._len is None:
self._len = len(self._value)
return self._len
def __getitem__(self, i):
if isinstance(i, slice):
return self.clone(
operator.getitem(self._value, i)
)
else:
return self._value[i]
def __str__(self): return self.prettyPrint()
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self.prettyPrint())
def index(self, suboid): return self._value.index(suboid)
def isPrefixOf(self, value):
"""Returns true if argument OID resides deeper in the OID tree"""
l = len(self)
if l <= len(value):
if self._value[:l] == value[:l]:
return 1
return 0
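    # Example (a sketch): ObjectIdentifier('1.3.6.1').isPrefixOf((1, 3, 6, 1, 2, 1))
    # returns 1, since the argument OID lies deeper in the OID tree.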
def prettyIn(self, value):
"""Dotted -> tuple of numerics OID converter"""
if isinstance(value, tuple):
pass
elif isinstance(value, ObjectIdentifier):
return tuple(value)
elif octets.isStringType(value):
r = []
for element in [ x for x in value.split('.') if x != '' ]:
try:
r.append(int(element, 0))
except ValueError:
raise error.PyAsn1Error(
'Malformed Object ID %s at %s: %s' %
(str(value), self.__class__.__name__, sys.exc_info()[1])
)
value = tuple(r)
else:
try:
value = tuple(value)
except TypeError:
raise error.PyAsn1Error(
'Malformed Object ID %s at %s: %s' %
(str(value), self.__class__.__name__,sys.exc_info()[1])
)
for x in value:
if not isinstance(x, intTypes) or x < 0:
raise error.PyAsn1Error(
'Invalid sub-ID in %s at %s' % (value, self.__class__.__name__)
)
return value
def prettyOut(self, value): return '.'.join([ str(x) for x in value ])
class Real(base.AbstractSimpleAsn1Item):
binEncBase = None # binEncBase = 16 is recommended for large numbers
try:
_plusInf = float('inf')
_minusInf = float('-inf')
_inf = (_plusInf, _minusInf)
except ValueError:
# Infinity support is platform and Python dependent
_plusInf = _minusInf = None
_inf = ()
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x09)
)
def __normalizeBase10(self, value):
m, b, e = value
while m and m % 10 == 0:
m = m / 10
e = e + 1
return m, b, e
def prettyIn(self, value):
if isinstance(value, tuple) and len(value) == 3:
if not isinstance(value[0], numericTypes) or \
not isinstance(value[1], intTypes) or \
not isinstance(value[2], intTypes):
raise error.PyAsn1Error('Lame Real value syntax: %s' % (value,))
if isinstance(value[0], float) and \
self._inf and value[0] in self._inf:
return value[0]
if value[1] not in (2, 10):
raise error.PyAsn1Error(
'Prohibited base for Real value: %s' % (value[1],)
)
if value[1] == 10:
value = self.__normalizeBase10(value)
return value
elif isinstance(value, intTypes):
return self.__normalizeBase10((value, 10, 0))
elif isinstance(value, (str, float)):
if isinstance(value, str):
try:
value = float(value)
except ValueError:
raise error.PyAsn1Error(
'Bad real value syntax: %s' % (value,)
)
if self._inf and value in self._inf:
return value
else:
e = 0
while int(value) != value:
value = value * 10
e = e - 1
return self.__normalizeBase10((int(value), 10, e))
elif isinstance(value, Real):
return tuple(value)
raise error.PyAsn1Error(
'Bad real value syntax: %s' % (value,)
)
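    # Example (a sketch): Real((3, 10, 2)) represents 3 * 10**2, so
    # float(Real((3, 10, 2))) == 300.0.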
def prettyOut(self, value):
if value in self._inf:
return '\'%s\'' % value
else:
return str(value)
def prettyPrint(self, scope=0):
if self.isInfinity():
return self.prettyOut(self._value)
else:
return str(float(self))
def isPlusInfinity(self): return self._value == self._plusInf
def isMinusInfinity(self): return self._value == self._minusInf
def isInfinity(self): return self._value in self._inf
def __str__(self): return str(float(self))
def __add__(self, value): return self.clone(float(self) + value)
def __radd__(self, value): return self + value
def __mul__(self, value): return self.clone(float(self) * value)
def __rmul__(self, value): return self * value
def __sub__(self, value): return self.clone(float(self) - value)
def __rsub__(self, value): return self.clone(value - float(self))
def __mod__(self, value): return self.clone(float(self) % value)
def __rmod__(self, value): return self.clone(value % float(self))
def __pow__(self, value, modulo=None): return self.clone(pow(float(self), value, modulo))
def __rpow__(self, value): return self.clone(pow(value, float(self)))
if sys.version_info[0] <= 2:
def __div__(self, value): return self.clone(float(self) / value)
def __rdiv__(self, value): return self.clone(value / float(self))
else:
def __truediv__(self, value): return self.clone(float(self) / value)
def __rtruediv__(self, value): return self.clone(value / float(self))
def __divmod__(self, value): return self.clone(float(self) // value)
def __rdivmod__(self, value): return self.clone(value // float(self))
def __int__(self): return int(float(self))
if sys.version_info[0] <= 2:
def __long__(self): return long(float(self))
def __float__(self):
if self._value in self._inf:
return self._value
else:
return float(
self._value[0] * pow(self._value[1], self._value[2])
)
def __abs__(self): return self.clone(abs(float(self)))
def __pos__(self): return self.clone(+float(self))
def __neg__(self): return self.clone(-float(self))
def __round__(self, n=0):
r = round(float(self), n)
if n:
return self.clone(r)
else:
return r
def __floor__(self): return self.clone(math.floor(float(self)))
def __ceil__(self): return self.clone(math.ceil(float(self)))
if sys.version_info[0:2] > (2, 5):
def __trunc__(self): return self.clone(math.trunc(float(self)))
def __lt__(self, value): return float(self) < value
def __le__(self, value): return float(self) <= value
def __eq__(self, value): return float(self) == value
def __ne__(self, value): return float(self) != value
def __gt__(self, value): return float(self) > value
def __ge__(self, value): return float(self) >= value
if sys.version_info[0] <= 2:
def __nonzero__(self): return bool(float(self))
else:
def __bool__(self): return bool(float(self))
__hash__ = base.AbstractSimpleAsn1Item.__hash__
def __getitem__(self, idx):
if self._value in self._inf:
raise error.PyAsn1Error('Invalid infinite value operation')
else:
return self._value[idx]
class Enumerated(Integer):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x0A)
)
# "Structured" ASN.1 types
class SetOf(base.AbstractConstructedAsn1Item):
componentType = None
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
)
typeId = 1
strictConstraints = False
def _cloneComponentValues(self, myClone, cloneValueFlag):
idx = 0; l = len(self._componentValues)
while idx < l:
c = self._componentValues[idx]
if c is not None:
if isinstance(c, base.AbstractConstructedAsn1Item):
myClone.setComponentByPosition(
idx, c.clone(cloneValueFlag=cloneValueFlag)
)
else:
myClone.setComponentByPosition(idx, c.clone())
idx = idx + 1
def _verifyComponent(self, idx, value):
t = self._componentType
if t is None:
return
if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints):
raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t))
if self.strictConstraints and \
not t.isSuperTypeOf(value, matchTags=False):
raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t))
def getComponentByPosition(self, idx): return self._componentValues[idx]
def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
l = len(self._componentValues)
if idx >= l:
self._componentValues = self._componentValues + (idx-l+1)*[None]
if value is None:
if self._componentValues[idx] is None:
if self._componentType is None:
raise error.PyAsn1Error('Component type not defined')
self._componentValues[idx] = self._componentType.clone()
self._componentValuesSet = self._componentValuesSet + 1
return self
elif not isinstance(value, base.Asn1Item):
if self._componentType is None:
raise error.PyAsn1Error('Component type not defined')
if isinstance(self._componentType, base.AbstractSimpleAsn1Item):
value = self._componentType.clone(value=value)
else:
raise error.PyAsn1Error('Instance value required')
if verifyConstraints:
if self._componentType is not None:
self._verifyComponent(idx, value)
self._verifySubtypeSpec(value, idx)
if self._componentValues[idx] is None:
self._componentValuesSet = self._componentValuesSet + 1
self._componentValues[idx] = value
return self
def getComponentTagMap(self):
if self._componentType is not None:
return self._componentType.getTagMap()
def prettyPrint(self, scope=0):
scope = scope + 1
r = self.__class__.__name__ + ':\n'
for idx in range(len(self._componentValues)):
r = r + ' '*scope
if self._componentValues[idx] is None:
r = r + '<empty>'
else:
r = r + self._componentValues[idx].prettyPrint(scope)
return r
def prettyPrintType(self, scope=0):
scope = scope + 1
r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__)
if self._componentType is not None:
r = r + ' '*scope
r = r + self._componentType.prettyPrintType(scope)
return r + '\n' + ' '*(scope-1) + '}'
class SequenceOf(SetOf):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
)
typeId = 2
class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
componentType = namedtype.NamedTypes()
strictConstraints = False
def __init__(self, componentType=None, tagSet=None,
subtypeSpec=None, sizeSpec=None):
if componentType is None:
componentType = self.componentType
base.AbstractConstructedAsn1Item.__init__(
self, componentType.clone(), tagSet, subtypeSpec, sizeSpec
)
self._componentTypeLen = len(self._componentType)
def __getitem__(self, idx):
if isinstance(idx, str):
return self.getComponentByName(idx)
else:
return base.AbstractConstructedAsn1Item.__getitem__(self, idx)
def __setitem__(self, idx, value):
if isinstance(idx, str):
self.setComponentByName(idx, value)
else:
base.AbstractConstructedAsn1Item.__setitem__(self, idx, value)
def _cloneComponentValues(self, myClone, cloneValueFlag):
idx = 0; l = len(self._componentValues)
while idx < l:
c = self._componentValues[idx]
if c is not None:
if isinstance(c, base.AbstractConstructedAsn1Item):
myClone.setComponentByPosition(
idx, c.clone(cloneValueFlag=cloneValueFlag)
)
else:
myClone.setComponentByPosition(idx, c.clone())
idx = idx + 1
def _verifyComponent(self, idx, value):
if idx >= self._componentTypeLen:
raise error.PyAsn1Error(
'Component type error out of range'
)
t = self._componentType[idx].getType()
if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints):
raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t))
if self.strictConstraints and \
not t.isSuperTypeOf(value, matchTags=False):
raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t))
def getComponentByName(self, name):
return self.getComponentByPosition(
self._componentType.getPositionByName(name)
)
def setComponentByName(self, name, value=None, verifyConstraints=True):
return self.setComponentByPosition(
self._componentType.getPositionByName(name),value,verifyConstraints
)
def getComponentByPosition(self, idx):
try:
return self._componentValues[idx]
except IndexError:
if idx < self._componentTypeLen:
return
raise
def setComponentByPosition(self, idx, value=None,
verifyConstraints=True,
exactTypes=False,
matchTags=True,
matchConstraints=True):
l = len(self._componentValues)
if idx >= l:
self._componentValues = self._componentValues + (idx-l+1)*[None]
if value is None:
if self._componentValues[idx] is None:
self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone()
self._componentValuesSet = self._componentValuesSet + 1
return self
elif not isinstance(value, base.Asn1Item):
t = self._componentType.getTypeByPosition(idx)
if isinstance(t, base.AbstractSimpleAsn1Item):
value = t.clone(value=value)
else:
raise error.PyAsn1Error('Instance value required')
if verifyConstraints:
if self._componentTypeLen:
self._verifyComponent(idx, value)
self._verifySubtypeSpec(value, idx)
if self._componentValues[idx] is None:
self._componentValuesSet = self._componentValuesSet + 1
self._componentValues[idx] = value
return self
def getNameByPosition(self, idx):
if self._componentTypeLen:
return self._componentType.getNameByPosition(idx)
def getDefaultComponentByPosition(self, idx):
if self._componentTypeLen and self._componentType[idx].isDefaulted:
return self._componentType[idx].getType()
def getComponentType(self):
if self._componentTypeLen:
return self._componentType
def setDefaultComponents(self):
if self._componentTypeLen == self._componentValuesSet:
return
idx = self._componentTypeLen
while idx:
idx = idx - 1
if self._componentType[idx].isDefaulted:
if self.getComponentByPosition(idx) is None:
self.setComponentByPosition(idx)
elif not self._componentType[idx].isOptional:
if self.getComponentByPosition(idx) is None:
raise error.PyAsn1Error(
'Uninitialized component #%s at %r' % (idx, self)
)
def prettyPrint(self, scope=0):
scope = scope + 1
r = self.__class__.__name__ + ':\n'
for idx in range(len(self._componentValues)):
if self._componentValues[idx] is not None:
r = r + ' '*scope
componentType = self.getComponentType()
if componentType is None:
r = r + '<no-name>'
else:
r = r + componentType.getNameByPosition(idx)
r = '%s=%s\n' % (
r, self._componentValues[idx].prettyPrint(scope)
)
return r
def prettyPrintType(self, scope=0):
scope = scope + 1
r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__)
for idx in range(len(self.componentType)):
r = r + ' '*scope
r = r + '"%s"' % self.componentType.getNameByPosition(idx)
r = '%s = %s\n' % (
r, self._componentType.getTypeByPosition(idx).prettyPrintType(scope)
)
return r + '\n' + ' '*(scope-1) + '}'
class Sequence(SequenceAndSetBase):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
)
typeId = 3
def getComponentTagMapNearPosition(self, idx):
if self._componentType:
return self._componentType.getTagMapNearPosition(idx)
def getComponentPositionNearType(self, tagSet, idx):
if self._componentType:
return self._componentType.getPositionNearType(tagSet, idx)
else:
return idx
class Set(SequenceAndSetBase):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
)
typeId = 4
def getComponent(self, innerFlag=0): return self
def getComponentByType(self, tagSet, innerFlag=0):
c = self.getComponentByPosition(
self._componentType.getPositionByType(tagSet)
)
if innerFlag and isinstance(c, Set):
# get inner component by inner tagSet
return c.getComponent(1)
else:
# get outer component by inner tagSet
return c
def setComponentByType(self, tagSet, value=None, innerFlag=0,
verifyConstraints=True):
idx = self._componentType.getPositionByType(tagSet)
t = self._componentType.getTypeByPosition(idx)
if innerFlag: # set inner component by inner tagSet
if t.getTagSet():
return self.setComponentByPosition(
idx, value, verifyConstraints
)
else:
t = self.setComponentByPosition(idx).getComponentByPosition(idx)
return t.setComponentByType(
tagSet, value, innerFlag, verifyConstraints
)
else: # set outer component by inner tagSet
return self.setComponentByPosition(
idx, value, verifyConstraints
)
def getComponentTagMap(self):
if self._componentType:
return self._componentType.getTagMap(True)
def getComponentPositionByType(self, tagSet):
if self._componentType:
return self._componentType.getPositionByType(tagSet)
class Choice(Set):
tagSet = baseTagSet = tag.TagSet() # untagged
sizeSpec = constraint.ConstraintsIntersection(
constraint.ValueSizeConstraint(1, 1)
)
typeId = 5
_currentIdx = None
def __eq__(self, other):
if self._componentValues:
return self._componentValues[self._currentIdx] == other
return NotImplemented
def __ne__(self, other):
if self._componentValues:
return self._componentValues[self._currentIdx] != other
return NotImplemented
def __lt__(self, other):
if self._componentValues:
return self._componentValues[self._currentIdx] < other
return NotImplemented
def __le__(self, other):
if self._componentValues:
return self._componentValues[self._currentIdx] <= other
return NotImplemented
def __gt__(self, other):
if self._componentValues:
return self._componentValues[self._currentIdx] > other
return NotImplemented
def __ge__(self, other):
if self._componentValues:
return self._componentValues[self._currentIdx] >= other
return NotImplemented
if sys.version_info[0] <= 2:
def __nonzero__(self): return bool(self._componentValues)
else:
def __bool__(self): return bool(self._componentValues)
    def __len__(self): return int(self._currentIdx is not None)
def verifySizeSpec(self):
if self._currentIdx is None:
raise error.PyAsn1Error('Component not chosen')
else:
self._sizeSpec(' ')
def _cloneComponentValues(self, myClone, cloneValueFlag):
try:
c = self.getComponent()
except error.PyAsn1Error:
pass
else:
if isinstance(c, Choice):
tagSet = c.getEffectiveTagSet()
else:
tagSet = c.getTagSet()
if isinstance(c, base.AbstractConstructedAsn1Item):
myClone.setComponentByType(
tagSet, c.clone(cloneValueFlag=cloneValueFlag)
)
else:
myClone.setComponentByType(tagSet, c.clone())
def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
l = len(self._componentValues)
if idx >= l:
self._componentValues = self._componentValues + (idx-l+1)*[None]
if self._currentIdx is not None:
self._componentValues[self._currentIdx] = None
if value is None:
if self._componentValues[idx] is None:
self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone()
self._componentValuesSet = 1
self._currentIdx = idx
return self
elif not isinstance(value, base.Asn1Item):
value = self._componentType.getTypeByPosition(idx).clone(
value=value
)
if verifyConstraints:
if self._componentTypeLen:
self._verifyComponent(idx, value)
self._verifySubtypeSpec(value, idx)
self._componentValues[idx] = value
self._currentIdx = idx
self._componentValuesSet = 1
return self
def getMinTagSet(self):
if self._tagSet:
return self._tagSet
else:
return self._componentType.genMinTagSet()
def getEffectiveTagSet(self):
if self._tagSet:
return self._tagSet
else:
c = self.getComponent()
if isinstance(c, Choice):
return c.getEffectiveTagSet()
else:
return c.getTagSet()
def getTagMap(self):
if self._tagSet:
return Set.getTagMap(self)
else:
return Set.getComponentTagMap(self)
def getComponent(self, innerFlag=0):
if self._currentIdx is None:
raise error.PyAsn1Error('Component not chosen')
else:
c = self._componentValues[self._currentIdx]
if innerFlag and isinstance(c, Choice):
return c.getComponent(innerFlag)
else:
return c
def getName(self, innerFlag=0):
if self._currentIdx is None:
raise error.PyAsn1Error('Component not chosen')
else:
if innerFlag:
c = self._componentValues[self._currentIdx]
if isinstance(c, Choice):
return c.getName(innerFlag)
return self._componentType.getNameByPosition(self._currentIdx)
def setDefaultComponents(self): pass
class Any(OctetString):
tagSet = baseTagSet = tag.TagSet() # untagged
typeId = 6
def getTagMap(self):
return tagmap.TagMap(
{ self.getTagSet(): self },
{ eoo.endOfOctets.getTagSet(): eoo.endOfOctets },
self
)
# XXX
# coercion rules?
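# --- Illustrative sketch (editor's addition, not part of upstream pyasn1) ---
# A minimal, hedged example of how the constructed types above compose. The
# 'Record' type and its field names are hypothetical; Integer and OctetString
# are assumed to be defined earlier in this module, as elsewhere in this file.
if __name__ == '__main__':
    class Record(Sequence):
        componentType = namedtype.NamedTypes(
            namedtype.NamedType('id', Integer()),
            namedtype.OptionalNamedType('note', OctetString())
        )
    rec = Record()
    rec.setComponentByName('id', 7)        # plain ints are cloned into Integer()
    rec.setComponentByName('note', 'hi')   # plain str is cloned into OctetString()
    print(rec.getComponentByName('id'))    # -> 7
    print(rec.prettyPrint())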
| apache-2.0 |
datenbetrieb/odoo | openerp/addons/test_inherit/models.py | 91 | 3456 | # -*- coding: utf-8 -*-
from openerp import models, fields, api, osv
# We just create a new model
class mother(models.Model):
_name = 'test.inherit.mother'
_columns = {
# check interoperability of field inheritance with old-style fields
'name': osv.fields.char('Name'),
'state': osv.fields.selection([('a', 'A'), ('b', 'B')], string='State'),
}
_defaults = {
'name': 'Foo',
}
surname = fields.Char(compute='_compute_surname')
@api.one
@api.depends('name')
def _compute_surname(self):
self.surname = self.name or ''
# We want to inherit from the parent model and add some fields
# in the child object
class daughter(models.Model):
_name = 'test.inherit.daughter'
template_id = fields.Many2one('test.inherit.mother', 'Template',
delegate=True, required=True, ondelete='cascade')
field_in_daughter = fields.Char('Field1')
# We add a new field in the parent object. A past refactoring broke this
# feature; these models and their accompanying test demonstrate the bug
# and its fix.
class mother(models.Model):
_inherit = 'test.inherit.mother'
field_in_mother = fields.Char()
partner_id = fields.Many2one('res.partner')
# extend the name field: make it required and change its default value
name = fields.Char(required=True, default='Bar')
# extend the selection of the state field
state = fields.Selection(selection_add=[('c', 'C')])
# override the computed field, and extend its dependencies
@api.one
@api.depends('field_in_mother')
def _compute_surname(self):
if self.field_in_mother:
self.surname = self.field_in_mother
else:
super(mother, self)._compute_surname()
class mother(models.Model):
_inherit = 'test.inherit.mother'
# extend again the selection of the state field
state = fields.Selection(selection_add=[('d', 'D')])
class daughter(models.Model):
_inherit = 'test.inherit.daughter'
# simply redeclare the field without adding any option
template_id = fields.Many2one()
# change the default value of an inherited field
name = fields.Char(default='Baz')
class res_partner(models.Model):
_inherit = 'res.partner'
# define a one2many field based on the inherited field partner_id
daughter_ids = fields.One2many('test.inherit.daughter', 'partner_id')
# Check the overriding of property fields by non-property fields.
# Contribution by Adrien Peiffer (ACSONE).
class test_inherit_property(models.Model):
_name = 'test.inherit.property'
_columns = {
'name': osv.fields.char('Name', required=True),
'property_foo': osv.fields.property(string='Foo', type='integer'),
'property_bar': osv.fields.property(string='Bar', type='integer'),
}
class test_inherit_property(models.Model):
_inherit = 'test.inherit.property'
@api.multi
def _get_foo(self, name, arg):
return dict.fromkeys(self.ids, 42)
_columns = {
# override property_foo with an old-api function field
'property_foo': osv.fields.function(_get_foo, type='integer', string='Foo'),
}
# override property_bar with a new-api computed field
property_bar = fields.Integer(compute='_compute_bar', company_dependent=False)
@api.multi
def _compute_bar(self):
for record in self:
record.property_bar = 42
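# Editor's sketch (commented out, not runnable standalone): how the
# inheritance declared above behaves through the usual Odoo environment.
# The 'env' handle and the asserted defaults are assumptions read off the
# field definitions in this file, not part of the test suite itself.
#   mother = env['test.inherit.mother'].create({})
#   assert mother.name == 'Bar'       # default overridden from 'Foo' above
#   daughter = env['test.inherit.daughter'].create({})
#   assert daughter.name == 'Baz'     # child model changed the default again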
| agpl-3.0 |
2014cdag4/2014cdag4 | wsgi/static/Brython2.1.0-20140419-113919/Lib/multiprocessing/dummy/connection.py | 707 | 3049 | #
# Analogue of `multiprocessing.connection` which uses queues instead of sockets
#
# multiprocessing/dummy/connection.py
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of author nor the names of any contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
__all__ = [ 'Client', 'Listener', 'Pipe' ]
from queue import Queue
families = [None]
class Listener(object):
def __init__(self, address=None, family=None, backlog=1):
self._backlog_queue = Queue(backlog)
def accept(self):
return Connection(*self._backlog_queue.get())
def close(self):
self._backlog_queue = None
address = property(lambda self: self._backlog_queue)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
self.close()
def Client(address):
_in, _out = Queue(), Queue()
address.put((_out, _in))
return Connection(_in, _out)
def Pipe(duplex=True):
a, b = Queue(), Queue()
return Connection(a, b), Connection(b, a)
class Connection(object):
def __init__(self, _in, _out):
self._out = _out
self._in = _in
self.send = self.send_bytes = _out.put
self.recv = self.recv_bytes = _in.get
def poll(self, timeout=0.0):
if self._in.qsize() > 0:
return True
if timeout <= 0.0:
return False
self._in.not_empty.acquire()
self._in.not_empty.wait(timeout)
self._in.not_empty.release()
return self._in.qsize() > 0
def close(self):
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
self.close()
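if __name__ == '__main__':
    # Minimal demonstration (editor's addition): the queue-backed pipe
    # behaves like a bidirectional socket pair within one process.
    a, b = Pipe()
    a.send('ping')
    assert b.poll()              # data is waiting on the other end
    assert b.recv() == 'ping'
    b.send('pong')
    print(a.recv())              # -> pong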
| gpl-2.0 |
popazerty/obh-gui | lib/python/Plugins/SystemPlugins/SoftwareManager/BackupRestore.py | 17 | 12974 | from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.Console import Console
from Components.ActionMap import ActionMap, NumberActionMap
from Components.Pixmap import Pixmap
from Components.Label import Label
from Components.Sources.StaticText import StaticText
from Components.MenuList import MenuList
from Components.config import getConfigListEntry, configfile, ConfigSelection, ConfigSubsection, ConfigText, ConfigLocations
from Components.config import config
from Components.ConfigList import ConfigList,ConfigListScreen
from Components.FileList import MultiFileSelectList
from Plugins.Plugin import PluginDescriptor
from enigma import eTimer, eEnv, eEPGCache
from Tools.Directories import *
from os import popen, path, makedirs, listdir, access, stat, rename, remove, W_OK, R_OK
from time import gmtime, strftime, localtime
from datetime import date
config.plugins.configurationbackup = ConfigSubsection()
config.plugins.configurationbackup.backuplocation = ConfigText(default = '/media/hdd/', visible_width = 50, fixed_size = False)
config.plugins.configurationbackup.backupdirs = ConfigLocations(default=[eEnv.resolve('${sysconfdir}/enigma2/'), '/etc/network/interfaces', '/etc/wpa_supplicant.conf', '/etc/wpa_supplicant.ath0.conf', '/etc/wpa_supplicant.wlan0.conf', '/etc/resolv.conf', '/etc/default_gw', '/etc/hostname'])
def getBackupPath():
backuppath = config.plugins.configurationbackup.backuplocation.value
if backuppath.endswith('/'):
return backuppath + 'backup'
else:
return backuppath + '/backup'
def getBackupFilename():
return "enigma2settingsbackup.tar.gz"
class BackupScreen(Screen, ConfigListScreen):
skin = """
<screen position="135,144" size="350,310" title="Backup is running" >
<widget name="config" position="10,10" size="330,250" transparent="1" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session, runBackup = False):
Screen.__init__(self, session)
self.session = session
self.runBackup = runBackup
self["actions"] = ActionMap(["WizardActions", "DirectionActions"],
{
"ok": self.close,
"back": self.close,
"cancel": self.close,
}, -1)
self.finished_cb = None
self.backuppath = getBackupPath()
self.backupfile = getBackupFilename()
self.fullbackupfilename = self.backuppath + "/" + self.backupfile
self.list = []
ConfigListScreen.__init__(self, self.list)
self.onLayoutFinish.append(self.layoutFinished)
if self.runBackup:
self.onShown.append(self.doBackup)
def layoutFinished(self):
self.setWindowTitle()
def setWindowTitle(self):
self.setTitle(_("Backup is running..."))
def doBackup(self):
configfile.save()
if config.plugins.softwaremanager.epgcache.value:
eEPGCache.getInstance().save()
try:
if (path.exists(self.backuppath) == False):
makedirs(self.backuppath)
self.backupdirs = ' '.join( config.plugins.configurationbackup.backupdirs.value )
if path.exists(self.fullbackupfilename):
dt = str(date.fromtimestamp(stat(self.fullbackupfilename).st_ctime))
self.newfilename = self.backuppath + "/" + dt + '-' + self.backupfile
if path.exists(self.newfilename):
remove(self.newfilename)
rename(self.fullbackupfilename,self.newfilename)
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, Console, title = _("Backup is running..."), cmdlist = ["tar -czvf " + self.fullbackupfilename + " " + self.backupdirs],finishedCallback = self.backupFinishedCB,closeOnSuccess = True)
else:
self.session.open(Console, title = _("Backup is running..."), cmdlist = ["tar -czvf " + self.fullbackupfilename + " " + self.backupdirs],finishedCallback = self.backupFinishedCB, closeOnSuccess = True)
except OSError:
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, MessageBox, _("Sorry, your backup destination is not writeable.\nPlease select a different one."), MessageBox.TYPE_INFO, timeout = 10 )
else:
self.session.openWithCallback(self.backupErrorCB,MessageBox, _("Sorry, your backup destination is not writeable.\nPlease select a different one."), MessageBox.TYPE_INFO, timeout = 10 )
def backupFinishedCB(self,retval = None):
self.close(True)
def backupErrorCB(self,retval = None):
self.close(False)
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.doBackup()
class BackupSelection(Screen):
skin = """
<screen name="BackupSelection" position="center,center" size="560,400" title="Select files/folders to backup">
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
<widget name="checkList" position="5,50" size="550,250" transparent="1" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Save"))
self["key_yellow"] = StaticText()
self.selectedFiles = config.plugins.configurationbackup.backupdirs.value
defaultDir = '/'
inhibitDirs = ["/bin", "/boot", "/dev", "/autofs", "/lib", "/proc", "/sbin", "/sys", "/hdd", "/tmp", "/mnt", "/media"]
self.filelist = MultiFileSelectList(self.selectedFiles, defaultDir, inhibitDirs = inhibitDirs )
self["checkList"] = self.filelist
self["actions"] = ActionMap(["DirectionActions", "OkCancelActions", "ShortcutActions"],
{
"cancel": self.exit,
"red": self.exit,
"yellow": self.changeSelectionState,
"green": self.saveSelection,
"ok": self.okClicked,
"left": self.left,
"right": self.right,
"down": self.down,
"up": self.up
}, -1)
if not self.selectionChanged in self["checkList"].onSelectionChanged:
self["checkList"].onSelectionChanged.append(self.selectionChanged)
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
idx = 0
self["checkList"].moveToIndex(idx)
self.setWindowTitle()
self.selectionChanged()
def setWindowTitle(self):
self.setTitle(_("Select files/folders to backup"))
def selectionChanged(self):
current = self["checkList"].getCurrent()[0]
if current[2] is True:
self["key_yellow"].setText(_("Deselect"))
else:
self["key_yellow"].setText(_("Select"))
def up(self):
self["checkList"].up()
def down(self):
self["checkList"].down()
def left(self):
self["checkList"].pageUp()
def right(self):
self["checkList"].pageDown()
def changeSelectionState(self):
self["checkList"].changeSelectionState()
self.selectedFiles = self["checkList"].getSelectedList()
def saveSelection(self):
self.selectedFiles = self["checkList"].getSelectedList()
config.plugins.configurationbackup.backupdirs.value = self.selectedFiles
config.plugins.configurationbackup.backupdirs.save()
config.plugins.configurationbackup.save()
config.save()
self.close(None)
def exit(self):
self.close(None)
def okClicked(self):
if self.filelist.canDescent():
self.filelist.descent()
class RestoreMenu(Screen):
skin = """
<screen name="RestoreMenu" position="center,center" size="560,400" title="Restore backups" >
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
<widget name="filelist" position="5,50" size="550,230" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session, plugin_path):
Screen.__init__(self, session)
self.skin_path = plugin_path
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Restore"))
self["key_yellow"] = StaticText(_("Delete"))
self.sel = []
self.val = []
self.entry = False
self.exe = False
self.path = ""
self["actions"] = NumberActionMap(["SetupActions"],
{
"ok": self.KeyOk,
"cancel": self.keyCancel
}, -1)
self["shortcuts"] = ActionMap(["ShortcutActions"],
{
"red": self.keyCancel,
"green": self.KeyOk,
"yellow": self.deleteFile,
})
self.flist = []
self["filelist"] = MenuList(self.flist)
self.fill_list()
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setWindowTitle()
def setWindowTitle(self):
self.setTitle(_("Restore backups"))
def fill_list(self):
self.flist = []
self.path = getBackupPath()
if (path.exists(self.path) == False):
makedirs(self.path)
for file in listdir(self.path):
if (file.endswith(".tar.gz")):
self.flist.append((file))
self.entry = True
self.flist.sort(reverse=True)
self["filelist"].l.setList(self.flist)
def KeyOk(self):
if (self.exe == False) and (self.entry == True):
self.sel = self["filelist"].getCurrent()
if self.sel:
self.val = self.path + "/" + self.sel
self.session.openWithCallback(self.startRestore, MessageBox, _("Are you sure you want to restore\nthe following backup:\n%s\nYour receiver will restart after the backup has been restored!") % (self.sel))
def keyCancel(self):
self.close()
def startRestore(self, ret = False):
if (ret == True):
self.exe = True
self.session.open(Console, title = _("Restoring..."), cmdlist = ["tar -xzvf " + self.path + "/" + self.sel + " -C /", "killall -9 enigma2"])
def deleteFile(self):
if (self.exe == False) and (self.entry == True):
self.sel = self["filelist"].getCurrent()
if self.sel:
self.val = self.path + "/" + self.sel
self.session.openWithCallback(self.startDelete, MessageBox, _("Are you sure you want to delete\nthe following backup:\n") + self.sel)
def startDelete(self, ret = False):
if (ret == True):
self.exe = True
print "removing:",self.val
if (path.exists(self.val) == True):
remove(self.val)
self.exe = False
self.fill_list()
class RestoreScreen(Screen, ConfigListScreen):
skin = """
<screen position="135,144" size="350,310" title="Restore is running..." >
<widget name="config" position="10,10" size="330,250" transparent="1" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session, runRestore = False):
Screen.__init__(self, session)
self.session = session
self.runRestore = runRestore
self["actions"] = ActionMap(["WizardActions", "DirectionActions"],
{
"ok": self.close,
"back": self.close,
"cancel": self.close,
}, -1)
self.finished_cb = None
self.backuppath = getBackupPath()
self.backupfile = getBackupFilename()
self.fullbackupfilename = self.backuppath + "/" + self.backupfile
self.list = []
ConfigListScreen.__init__(self, self.list)
self.onLayoutFinish.append(self.layoutFinished)
if self.runRestore:
self.onShown.append(self.doRestore)
def layoutFinished(self):
self.setWindowTitle()
def setWindowTitle(self):
self.setTitle(_("Restoring..."))
def doRestore(self):
if path.exists("/proc/stb/vmpeg/0/dst_width"):
restorecmdlist = ["tar -xzvf " + self.fullbackupfilename + " -C /", "echo 0 > /proc/stb/vmpeg/0/dst_height", "echo 0 > /proc/stb/vmpeg/0/dst_left", "echo 0 > /proc/stb/vmpeg/0/dst_top", "echo 0 > /proc/stb/vmpeg/0/dst_width", "killall -9 enigma2"]
else:
restorecmdlist = ["tar -xzvf " + self.fullbackupfilename + " -C /", "killall -9 enigma2"]
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, Console, title = _("Restoring..."), cmdlist = restorecmdlist)
else:
self.session.open(Console, title = _("Restoring..."), cmdlist = restorecmdlist)
def backupFinishedCB(self,retval = None):
self.close(True)
def backupErrorCB(self,retval = None):
self.close(False)
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.doRestore()
| gpl-2.0 |
mlperf/training_results_v0.5 | v0.5.0/google/cloud_v2.512/resnet-tpuv2-512/code/resnet/model/models/official/mnist/dataset.py | 7 | 4116 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""tf.data.Dataset interface to the MNIST dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gzip
import os
import shutil
import tempfile
import numpy as np
from six.moves import urllib
import tensorflow as tf
def read32(bytestream):
"""Read 4 bytes from bytestream as an unsigned 32-bit integer."""
dt = np.dtype(np.uint32).newbyteorder('>')
return np.frombuffer(bytestream.read(4), dtype=dt)[0]
def check_image_file_header(filename):
"""Validate that filename corresponds to images for the MNIST dataset."""
with tf.gfile.Open(filename, 'rb') as f:
magic = read32(f)
read32(f) # num_images, unused
rows = read32(f)
cols = read32(f)
if magic != 2051:
raise ValueError('Invalid magic number %d in MNIST file %s' % (magic,
f.name))
if rows != 28 or cols != 28:
raise ValueError(
'Invalid MNIST file %s: Expected 28x28 images, found %dx%d' %
(f.name, rows, cols))
def check_labels_file_header(filename):
"""Validate that filename corresponds to labels for the MNIST dataset."""
with tf.gfile.Open(filename, 'rb') as f:
magic = read32(f)
read32(f) # num_items, unused
if magic != 2049:
raise ValueError('Invalid magic number %d in MNIST file %s' % (magic,
f.name))
def download(directory, filename):
"""Download (and unzip) a file from the MNIST dataset if not already done."""
filepath = os.path.join(directory, filename)
if tf.gfile.Exists(filepath):
return filepath
if not tf.gfile.Exists(directory):
tf.gfile.MakeDirs(directory)
# CVDF mirror of http://yann.lecun.com/exdb/mnist/
url = 'https://storage.googleapis.com/cvdf-datasets/mnist/' + filename + '.gz'
_, zipped_filepath = tempfile.mkstemp(suffix='.gz')
print('Downloading %s to %s' % (url, zipped_filepath))
urllib.request.urlretrieve(url, zipped_filepath)
with gzip.open(zipped_filepath, 'rb') as f_in, \
tf.gfile.Open(filepath, 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
os.remove(zipped_filepath)
return filepath
def dataset(directory, images_file, labels_file):
"""Download and parse MNIST dataset."""
images_file = download(directory, images_file)
labels_file = download(directory, labels_file)
check_image_file_header(images_file)
check_labels_file_header(labels_file)
def decode_image(image):
# Normalize from [0, 255] to [0.0, 1.0]
image = tf.decode_raw(image, tf.uint8)
image = tf.cast(image, tf.float32)
image = tf.reshape(image, [784])
return image / 255.0
def decode_label(label):
label = tf.decode_raw(label, tf.uint8) # tf.string -> [tf.uint8]
label = tf.reshape(label, []) # label is a scalar
return tf.to_int32(label)
images = tf.data.FixedLengthRecordDataset(
images_file, 28 * 28, header_bytes=16).map(decode_image)
labels = tf.data.FixedLengthRecordDataset(
labels_file, 1, header_bytes=8).map(decode_label)
return tf.data.Dataset.zip((images, labels))
def train(directory):
"""tf.data.Dataset object for MNIST training data."""
return dataset(directory, 'train-images-idx3-ubyte',
'train-labels-idx1-ubyte')
def test(directory):
"""tf.data.Dataset object for MNIST test data."""
return dataset(directory, 't10k-images-idx3-ubyte', 't10k-labels-idx1-ubyte')
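# Editor's addition, for illustration only: one plausible input pipeline
# built on the helpers above; the directory, shuffle buffer and batch size
# are arbitrary assumptions, not values mandated by this module.
def example_train_input_fn(data_dir='/tmp/mnist_data'):
  """Sketch of a training input_fn using the dataset() helpers above."""
  ds = train(data_dir)
  return ds.cache().shuffle(buffer_size=50000).batch(32).repeat()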
| apache-2.0 |
chshu/openthread | tools/harness-automation/cases_R140/fed_9_2_13.py | 18 | 1873 | #!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
from autothreadharness.harness_case import HarnessCase
class FED_9_2_13(HarnessCase):
role = HarnessCase.ROLE_FED
case = '9 2 13'
golden_devices_required = 5
def on_dialog(self, dialog, title):
pass
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
educloudalliance/eca-auth-data | authdata/tests/test_datasources.py | 1 | 21342 |
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014-2015 Haltu Oy, http://haltu.fi
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# pylint: disable=locally-disabled, no-member, protected-access
import base64
import mock
import requests
from django.test import TestCase
from django.test import RequestFactory
from django.test import override_settings
from authdata import models
from authdata.datasources.base import ExternalDataSource
import authdata.datasources.dreamschool
import authdata.datasources.ldap_base
import authdata.datasources.oulu
AUTH_EXTERNAL_SOURCES = {
'ldap_test': ['authdata.datasources.ldap_base', 'TestLDAPDataSource', {
'host': 'ldaps://1.2.3.4',
'username': 'uid=foo,ou=Bar,dc=zap,dc=csc,dc=fi',
'password': 'password'
}],
'dreamschool': ['authdata.datasources.dreamschool', 'DreamschoolDataSource', {
'api_url': 'https://foo.fi/api/2/user/',
'username': 'username',
'password': 'password',
}],
}
AUTH_EXTERNAL_ATTRIBUTE_BINDING = {
'ldap_test': 'ldap_test',
'dreamschool': 'dreamschool',
}
AUTH_EXTERNAL_MUNICIPALITY_BINDING = {
'Foo': 'ldap_test',
'Bar': 'dreamschool',
}
AUTHDATA_DREAMSCHOOL_ORG_MAP = {
u'bar': {u'school1': 3, u'äö school': 1},
}
class TestExternalDataSource(TestCase):
def setUp(self):
self.o = ExternalDataSource()
def test_init(self):
self.assertTrue(self.o)
def test_provision_user(self):
obj = self.o
obj.external_source = 'foo'
obj.provision_user(oid='oid', external_id='foo')
self.assertEqual(models.User.objects.filter(username='oid').count(), 1)
self.assertEqual(models.Source.objects.filter(name='local').count(), 1)
self.assertEqual(models.Attribute.objects.count(), 1)
self.assertEqual(models.UserAttribute.objects.count(), 1)
def test_oid(self):
with self.assertRaises(NotImplementedError):
self.o.get_oid(username='foo')
def test_data(self):
with self.assertRaises(NotImplementedError):
self.o.get_data(external_id='foo')
def test_user_data(self):
with self.assertRaises(NotImplementedError):
self.o.get_user_data(request='foo')
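# Editor's sketch (commented out): the minimal shape of a concrete data
# source, inferred from the ExternalDataSource interface exercised above;
# the class name and oid scheme here are hypothetical.
#   class DummySource(ExternalDataSource):
#     external_source = 'dummy'
#     def get_oid(self, username):
#       return 'MPASSOID.%s' % username
#     def get_data(self, external_id):
#       return None
#     def get_user_data(self, request):
#       return {'count': 0, 'next': None, 'previous': None, 'results': []}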
@override_settings(AUTH_EXTERNAL_SOURCES=AUTH_EXTERNAL_SOURCES)
@override_settings(AUTH_EXTERNAL_ATTRIBUTE_BINDING=AUTH_EXTERNAL_ATTRIBUTE_BINDING)
@override_settings(AUTH_EXTERNAL_MUNICIPALITY_BINDING=AUTH_EXTERNAL_MUNICIPALITY_BINDING)
@override_settings(AUTHDATA_DREAMSCHOOL_ORG_MAP=AUTHDATA_DREAMSCHOOL_ORG_MAP)
class TestDreamschoolDataSource(TestCase):
def setUp(self):
self.o = authdata.datasources.dreamschool.DreamschoolDataSource(api_url='mock://foo',
username='foo', password='bar')
authdata.datasources.dreamschool.requests = mock.Mock()
authdata.datasources.dreamschool.requests.codes = requests.codes
data = {'objects': [
{'id': 123,
'username': 'user',
'first_name': 'first',
'last_name': 'last',
'roles': [
{
'permissions': [{
'code': authdata.datasources.dreamschool.TEACHER_PERM,
}],
'organisation': {'id': 1},
},
],
'user_groups': [
{
'organisation': {
'id': 1,
'title': 'Äö school',
},
'title': 'Group1',
},
],
}]
}
self.data = data
response_mock = mock.Mock()
response_mock.status_code = requests.codes.ok
response_mock.json.return_value = data
authdata.datasources.dreamschool.requests.get.return_value = response_mock
self.factory = RequestFactory()
def test_init(self):
self.assertTrue(self.o)
def test_oid(self):
oid = self.o.get_oid(username='foo')
self.assertTrue(oid.startswith('MPASSOID'))
self.assertEqual(len(oid), 30)
def test_user_data(self):
d = {'municipality': 'Bar', 'school': 'school1', 'group': 'Group1'}
request = self.factory.get('/foo', d)
data = self.o.get_user_data(request=request)
self.assertEqual(data['count'], 1)
self.assertEqual(data['next'], None)
self.assertEqual(data['previous'], None)
self.assertEqual(data['results'][0]['attributes'], [])
self.assertEqual(data['results'][0]['first_name'], 'first')
self.assertEqual(data['results'][0]['last_name'], 'last')
self.assertEqual(data['results'][0]['username'], 'MPASSOID.ea5f9ca03f6edf5a0409d')
roles = list(data['results'][0]['roles'])
expected_roles = [
{
'school': 'Äö school',
'role': 'teacher',
'group': 'Group1',
'municipality': u'Bar'
},
]
self.assertEqual(roles, expected_roles)
def test_user_data_api_fail(self):
response_mock = mock.Mock()
response_mock.status_code = 500
response_mock.json.return_value = self.data
authdata.datasources.dreamschool.requests.get.return_value = response_mock
d = {'municipality': 'Bar', 'school': 'school1', 'group': 'Group1'}
request = self.factory.get('/foo', d)
data = self.o.get_user_data(request=request)
self.assertEqual(data['count'], 0)
self.assertEqual(data['next'], None)
self.assertEqual(data['previous'], None)
self.assertEqual(data['results'], [])
def test_user_data_api_parse_json_fail(self):
response_mock = mock.Mock()
response_mock.status_code = 200
response_mock.json.side_effect = ValueError('foo')
authdata.datasources.dreamschool.requests.get.return_value = response_mock
d = {'municipality': 'Bar', 'school': 'school1', 'group': 'Group1'}
request = self.factory.get('/foo', d)
data = self.o.get_user_data(request=request)
self.assertEqual(data['count'], 0)
self.assertEqual(data['next'], None)
self.assertEqual(data['previous'], None)
self.assertEqual(data['results'], [])
def test_get_municipality_by_org_id(self):
org_id = 1
municipality = self.o._get_municipality_by_org_id(org_id)
self.assertEqual(municipality, u'Bar')
@override_settings(AUTHDATA_DREAMSCHOOL_ORG_MAP={})
def test_get_municipality_by_org_id_not_in_settings(self):
org_id = 1
municipality = self.o._get_municipality_by_org_id(org_id)
self.assertEqual(municipality, u'')
def test_get_roles_from_userdata_student(self):
userdata = {
'roles': [
{
'permissions': [{'code': 'foo'}],
'organisation': {'id': 1},
},
],
'user_groups': [
{
'organisation': {
'id': 1,
'title': 'Äö school',
},
'title': 'Group1',
},
],
}
roles = list(self.o._get_roles(userdata))
expected_roles = [
{
"school": "Äö school",
"role": "student",
"group": "Group1",
"municipality": u"Bar"
},
]
self.assertEqual(roles, expected_roles)
def test_get_roles_from_userdata_teacher(self):
userdata = {
'roles': [
{
'permissions': [{
'code': authdata.datasources.dreamschool.TEACHER_PERM,
}],
'organisation': {'id': 1},
},
],
'user_groups': [
{
'organisation': {
'id': 1,
'title': 'Äö school',
},
'title': 'Group1',
},
],
}
roles = list(self.o._get_roles(userdata))
expected_roles = [
{
'school': 'Äö school',
'role': 'teacher',
'group': 'Group1',
'municipality': u'Bar'
},
]
self.assertEqual(roles, expected_roles)
def test_get_org_id_not_configured(self):
municipality = ''
school = ''
self.assertFalse(self.o._get_org_id(municipality, school))
def test_get_org_id(self):
municipality = u'Bar'
school = u'äö school'
expected_org_id = 1
org_id = self.o._get_org_id(municipality=municipality, school=school)
self.assertEqual(org_id, expected_org_id)
municipality = u'Foo'
school = u'äö school'
org_id = self.o._get_org_id(municipality=municipality, school=school)
self.assertEqual(org_id, None)
municipality = u'Bar'
school = u'school1'
expected_org_id = 3
org_id = self.o._get_org_id(municipality=municipality, school=school)
self.assertEqual(org_id, expected_org_id)
def test_get_data(self):
external_id = '123'
data = {
'id': 123,
'username': 'User',
'first_name': 'First',
'last_name': 'Last',
'roles': [
{
'permissions': [{
'code': authdata.datasources.dreamschool.TEACHER_PERM,
}],
'organisation': {'id': 1},
},
],
'user_groups': [
{
'organisation': {
'id': 1,
'title': 'Äö school',
},
'title': 'Group1',
},
],
}
response_mock = mock.Mock()
response_mock.status_code = requests.codes.ok
response_mock.json.return_value = data
authdata.datasources.dreamschool.requests.get.return_value = response_mock
data = self.o.get_data(external_id=external_id)
data['roles'] = list(data['roles'])
expected_data = {
'attributes': [],
'username': 'MPASSOID.08153889bda7b8ffd5a4d',
'first_name': 'First',
'last_name': 'Last',
'roles': [{
'school': 'Äö school',
'role': 'teacher',
'group': 'Group1',
'municipality': u'Bar'
}],
}
self.assertEqual(data, expected_data)
def test_get_data_api_fail(self):
external_id = '123'
data = {
'id': 123,
'username': 'User',
'first_name': 'First',
'last_name': 'Last',
'roles': [
{
'permissions': [{
'code': authdata.datasources.dreamschool.TEACHER_PERM,
}],
'organisation': {'id': 1},
},
],
'user_groups': [
{
'organisation': {
'id': 1,
'title': 'Äö school',
},
'title': 'Group1',
},
],
}
response_mock = mock.Mock()
response_mock.status_code = 500
response_mock.json.return_value = data
authdata.datasources.dreamschool.requests.get.return_value = response_mock
data = self.o.get_data(external_id=external_id)
self.assertEqual(data, None)
def test_get_data_json_parse_fail(self):
external_id = '123'
data = {
'id': 123,
'username': 'User',
'first_name': 'First',
'last_name': 'Last',
'roles': [
{
'permissions': [{
'code': authdata.datasources.dreamschool.TEACHER_PERM,
}],
'organisation': {'id': 1},
},
],
'user_groups': [
{
'organisation': {
'id': 1,
'title': 'Äö school',
},
'title': 'Group1',
},
],
}
response_mock = mock.Mock()
response_mock.status_code = 200
response_mock.json.side_effect = ValueError('foo')
authdata.datasources.dreamschool.requests.get.return_value = response_mock
data = self.o.get_data(external_id=external_id)
self.assertEqual(data, None)
class TestLDAPDataSource(TestCase):
def setUp(self):
self.obj = authdata.datasources.ldap_base.LDAPDataSource(host='host',
username='foo', password='bar', external_source='foo')
self.obj.ldap = mock.Mock()
def test_init(self):
self.assertTrue(self.obj)
self.assertEqual(self.obj.external_source, 'foo')
def test_connect(self):
self.obj.connect()
def test_query(self):
self.obj.query(query_filter=None)
def test_get_municipality_id(self):
muni_id = self.obj.get_municipality_id(name='foo')
self.assertEqual(muni_id, 'foo')
self.obj.municipality_id_map = {'a': '123'}
muni_id = self.obj.get_municipality_id(name='a')
self.assertEqual(muni_id, '123')
def test_get_school_id(self):
muni_id = self.obj.get_school_id(name='foo')
self.assertEqual(muni_id, 'foo')
self.obj.school_id_map = {'a': '123'}
muni_id = self.obj.get_school_id(name='a')
self.assertEqual(muni_id, '123')
class TestLdapTest(TestCase):
def setUp(self):
self.obj = authdata.datasources.ldap_base.TestLDAPDataSource(host='host',
username='foo', password='bar', external_source='foo')
self.obj.ldap = mock.Mock()
def test_init(self):
self.assertTrue(self.obj)
self.assertEqual(self.obj.external_source, 'foo')
def test_school_id_map(self):
name = u'Ääkkös abc 123'
mapper = self.obj.school_id_map()
self.assertEqual('00123', mapper.get(name))
def test_oid(self):
username = 'abc-123'
expected_oid = 'MPASSOID.c5af545a6479eb503ce5d'
oid = self.obj.get_oid(username)
self.assertEqual(oid, expected_oid)
self.assertEqual(len(oid), 30)
def test_get_data_index_error(self):
with mock.patch.object(self.obj, 'query') as mock_query:
mock_query.side_effect = IndexError('foo')
data = self.obj.get_data(external_id=123)
self.assertEqual(data, None)
def test_get_data(self):
self.assertFalse(authdata.models.User.objects.count())
r = [(
'cn=bar,ou=Opettajat,ou=People,ou=LdapKoulu1,ou=KuntaYksi,dc=mpass-test,dc=csc,dc=fi',
{'cn': ['bar'],
'givenName': ['First'],
'mail': ['bar@mpass-test.invalid'],
'objectClass': ['top', 'inetOrgPerson'],
'sn': ['Opettaja10013'],
'title': ['Opettaja'],
'uid': ['bar'],
'userPassword': ['foo'],
'departmentNumber': ['Group1'],
}
)]
with mock.patch.object(self.obj, 'query', return_value=r):
query_result = self.obj.get_data(external_id=123)
expected_data = {
'username': 'MPASSOID.c38029f36d3aebd850cfb',
'last_name': 'Opettaja10013',
'first_name': 'First',
'roles': [
{
'group': 'Group1',
'municipality': 'KuntaYksi',
'role': 'Opettaja',
'school': 'LdapKoulu1',
}],
'attributes': [],
}
self.assertEqual(query_result, expected_data)
# User is provisioned
    self.assertEqual(authdata.models.User.objects.count(), 1)
def test_get_user_data(self):
self.assertFalse(authdata.models.User.objects.count())
r = [(
'cn=bar,ou=Opettajat,ou=People,ou=LdapKoulu1,ou=KuntaYksi,dc=mpass-test,dc=csc,dc=fi',
{'cn': ['bar'],
'givenName': ['First'],
'mail': ['bar@mpass-test.invalid'],
'objectClass': ['top', 'inetOrgPerson'],
'sn': ['Opettaja10013'],
'title': ['Opettaja'],
'uid': ['bar'],
'userPassword': ['foo'],
'departmentNumber': ['Group1'],
}
)]
mock_request = mock.Mock()
mock_request.GET = {'school': u'Ääkkösschool', 'group': u'Ääkköskoulu'}
with mock.patch.object(self.obj, 'query', return_value=r):
query_result = self.obj.get_user_data(request=mock_request)
expected_data = {
'count': 1,
'next': None,
'previous': None,
'results': [{'attributes': [],
'first_name': 'First',
'last_name': 'Opettaja10013',
'roles': [{'group': 'Group1',
'municipality': '1234567-8',
'role': 'Opettaja',
'school': '00001'}],
'username': 'MPASSOID.c38029f36d3aebd850cfb'}]
}
self.assertEqual(query_result, expected_data)
# User is provisioned
    self.assertEqual(authdata.models.User.objects.count(), 1)
class TestOuluLDAPDataSource(TestCase):
def setUp(self):
self.obj = authdata.datasources.oulu.OuluLDAPDataSource(base_dn='base',
host='host', username='foo', password='bar', external_source='foo')
self.obj.ldap = mock.Mock()
self.q_results = [(
'cn=bar,ou=Opettajat,ou=People,ou=LdapKoulu1,ou=KuntaYksi,dc=mpass-test,dc=csc,dc=fi',
{'cn': ['bar'],
'givenName': ['First'],
'mail': ['bar@mpass-test.invalid'],
'objectClass': ['top', 'inetOrgPerson'],
'sn': ['Last'],
'title': ['Opettaja'],
'uid': ['uid1'],
'userPassword': ['password1'],
'department': ['Group1'],
'objectGUID': ['username1'],
'physicalDeliveryOfficeName': ['School1'],
}
)]
def test_init(self):
self.assertTrue(self.obj)
self.assertEqual(self.obj.external_source, 'foo')
def test_school_id_map(self):
self.assertEqual(self.obj.school_id_map.get(u'Ääkkös koulu 123'), None)
self.assertEqual(self.obj.school_id_map.get(u'Herukan koulu'), '06347')
def test_connect(self):
self.obj.connect()
def test_oid(self):
username = 'abc-123'
expected_oid = 'MPASSOID.1a1786a2133f1751de913'
oid = self.obj.get_oid(username)
self.assertEqual(oid, expected_oid)
self.assertEqual(len(oid), 30)
def test_external_id(self):
query_result = ('foo', {})
with self.assertRaises(KeyError):
self.obj.get_external_id(query_result)
result = self.obj.get_external_id(query_result=self.q_results[0])
self.assertEqual(result, 'uid1')
def test_username(self):
result = self.obj.get_username(query_result=self.q_results[0])
self.assertEqual(result, 'username1')
def test_first_name(self):
result = self.obj.get_first_name(query_result=self.q_results[0])
self.assertEqual(result, 'First')
def test_last_name(self):
result = self.obj.get_last_name(query_result=self.q_results[0])
self.assertEqual(result, 'Last')
def test_get_municipality(self):
result = self.obj.get_municipality()
self.assertEqual(result, 'Oulu')
def test_school(self):
result = self.obj.get_school(query_result=self.q_results[0])
self.assertEqual(result, 'School1')
def test_role(self):
result = self.obj.get_role(query_result=self.q_results[0])
self.assertEqual(result, 'Opettaja')
def test_group(self):
result = self.obj.get_group(query_result=self.q_results[0])
self.assertEqual(result, 'Group1')
def test_get_data_index_error(self):
username = base64.b64encode('username1')
with mock.patch.object(self.obj, 'query') as mock_query:
mock_query.side_effect = IndexError('foo')
data = self.obj.get_data(external_id=username)
self.assertEqual(data, None)
def test_get_data(self):
self.assertFalse(authdata.models.User.objects.count())
username = base64.b64encode('username1')
with mock.patch.object(self.obj, 'query', return_value=self.q_results):
query_result = self.obj.get_data(external_id=username)
expected_data = {
'username': 'MPASSOID.b51110b8d091b6792abde',
'last_name': 'Last',
'first_name': 'First',
'roles': [
{
'group': 'Group1',
'municipality': '0187690-1',
'role': 'Opettaja',
'school': 'School1',
}],
'attributes': [],
}
self.assertEqual(query_result, expected_data)
# User is provisioned
    self.assertEqual(authdata.models.User.objects.count(), 1)
def test_get_user_data(self):
self.assertFalse(authdata.models.User.objects.count())
mock_request = mock.Mock()
mock_request.GET = {'school': u'Ääkkösschool', 'group': u'Ääkköskoulu'}
with mock.patch.object(self.obj, 'query', return_value=self.q_results):
query_result = self.obj.get_user_data(request=mock_request)
expected_data = {
'count': 1,
'next': None,
'previous': None,
'results': [
{'username': 'MPASSOID.b51110b8d091b6792abde',
'last_name': 'Last',
'first_name': 'First',
'roles': [
{
'group': 'Group1',
'municipality': '0187690-1',
'role': 'Opettaja',
'school': 'School1',
}],
'attributes': [],
}
]
}
self.assertEqual(query_result, expected_data)
# User is provisioned
    self.assertEqual(authdata.models.User.objects.count(), 1)
# vim: tabstop=2 expandtab shiftwidth=2 softtabstop=2
| mit |
springmerchant/pybbm | pybb/admin.py | 7 | 4809 | # -*- coding: utf-8
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from django.core.urlresolvers import reverse
from pybb.models import Category, Forum, Topic, Post, Profile, Attachment, PollAnswer
from pybb import compat, util
username_field = compat.get_username_field()
class ForumInlineAdmin(admin.TabularInline):
model = Forum
fields = ['name', 'hidden', 'position']
extra = 0
class CategoryAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
list_display = ['name', 'position', 'hidden', 'forum_count']
list_per_page = 20
ordering = ['position']
search_fields = ['name']
list_editable = ['position']
inlines = [ForumInlineAdmin]
class ForumAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
list_display = ['name', 'category', 'hidden', 'position', 'topic_count', ]
list_per_page = 20
raw_id_fields = ['moderators']
ordering = ['-category']
search_fields = ['name', 'category__name']
list_editable = ['position', 'hidden']
fieldsets = (
(None, {
'fields': ('category', 'parent', 'name', 'hidden', 'position', )
}
),
(_('Additional options'), {
'classes': ('collapse',),
'fields': ('updated', 'description', 'headline', 'post_count', 'moderators', 'slug')
}
),
)
class PollAnswerAdmin(admin.TabularInline):
model = PollAnswer
fields = ['text', ]
extra = 0
class TopicAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
list_display = ['name', 'forum', 'created', 'head', 'post_count', 'poll_type',]
list_per_page = 20
raw_id_fields = ['user', 'subscribers']
ordering = ['-created']
date_hierarchy = 'created'
search_fields = ['name']
fieldsets = (
(None, {
'fields': ('forum', 'name', 'user', ('created', 'updated'), 'poll_type',)
}
),
(_('Additional options'), {
'classes': ('collapse',),
'fields': (('views', 'post_count'), ('sticky', 'closed'), 'subscribers', 'slug')
}
),
)
inlines = [PollAnswerAdmin, ]
class TopicReadTrackerAdmin(admin.ModelAdmin):
list_display = ['topic', 'user', 'time_stamp']
search_fields = ['user__%s' % username_field]
class ForumReadTrackerAdmin(admin.ModelAdmin):
list_display = ['forum', 'user', 'time_stamp']
search_fields = ['user__%s' % username_field]
class PostAdmin(admin.ModelAdmin):
list_display = ['topic', 'user', 'created', 'updated', 'summary']
list_per_page = 20
raw_id_fields = ['user', 'topic']
ordering = ['-created']
date_hierarchy = 'created'
search_fields = ['body']
fieldsets = (
(None, {
'fields': ('topic', 'user')
}
),
(_('Additional options'), {
'classes': ('collapse',),
'fields' : (('created', 'updated'), 'user_ip')
}
),
(_('Message'), {
'fields': ('body', 'body_html', 'body_text')
}
),
)
class ProfileAdmin(admin.ModelAdmin):
list_display = ['user', 'time_zone', 'language', 'post_count']
list_per_page = 20
ordering = ['-user']
search_fields = ['user__%s' % username_field]
fieldsets = (
(None, {
'fields': ('time_zone', 'language')
}
),
(_('Additional options'), {
'classes': ('collapse',),
'fields' : ('avatar', 'signature', 'show_signatures')
}
),
)
class AttachmentAdmin(admin.ModelAdmin):
list_display = ['file', 'size', 'admin_view_post', 'admin_edit_post']
def admin_view_post(self, obj):
return '<a href="%s">view</a>' % obj.post.get_absolute_url()
admin_view_post.allow_tags = True
admin_view_post.short_description = _('View post')
def admin_edit_post(self, obj):
return '<a href="%s">edit</a>' % reverse('admin:pybb_post_change', args=[obj.post.pk])
admin_edit_post.allow_tags = True
admin_edit_post.short_description = _('Edit post')
admin.site.register(Category, CategoryAdmin)
admin.site.register(Forum, ForumAdmin)
admin.site.register(Topic, TopicAdmin)
admin.site.register(Post, PostAdmin)
admin.site.register(Attachment, AttachmentAdmin)
if util.get_pybb_profile_model() == Profile:
admin.site.register(Profile, ProfileAdmin)
# This can be used to debug read/unread trackers
#admin.site.register(TopicReadTracker, TopicReadTrackerAdmin)
#admin.site.register(ForumReadTracker, ForumReadTrackerAdmin) | bsd-2-clause |
sergeLabo/asyncio-osc | irc/schedule.py | 22 | 3089 | """
Classes for calling functions on a schedule.
"""
import datetime
import numbers
class DelayedCommand(datetime.datetime):
"""
A command to be executed after some delay (seconds or timedelta).
Clients may override .now() to have dates interpreted in a different
manner, such as to use UTC or to have timezone-aware times.
"""
@classmethod
    def now(cls, tzinfo=None):
return datetime.datetime.now(tzinfo)
@classmethod
def from_datetime(cls, other):
return cls(other.year, other.month, other.day, other.hour,
other.minute, other.second, other.microsecond,
other.tzinfo)
@classmethod
def after(cls, delay, function):
if not isinstance(delay, datetime.timedelta):
delay = datetime.timedelta(seconds=delay)
due_time = cls.now() + delay
cmd = cls.from_datetime(due_time)
cmd.delay = delay
cmd.function = function
return cmd
@classmethod
def at_time(cls, at, function):
"""
Construct a DelayedCommand to come due at `at`, where `at` may be
a datetime or timestamp. If `at` is a real number, it will be
interpreted as a naive local timestamp.
"""
if isinstance(at, numbers.Real):
at = datetime.datetime.fromtimestamp(at)
cmd = cls.from_datetime(at)
cmd.delay = at - cmd.now()
cmd.function = function
return cmd
def due(self):
return self.now() >= self
class PeriodicCommand(DelayedCommand):
"""
Like a delayed command, but expect this command to run every delay
seconds.
"""
def next(self):
cmd = self.__class__.from_datetime(self + self.delay)
cmd.delay = self.delay
cmd.function = self.function
return cmd
def __setattr__(self, key, value):
if key == 'delay' and not value > datetime.timedelta():
raise ValueError("A PeriodicCommand must have a positive, "
"non-zero delay.")
super(PeriodicCommand, self).__setattr__(key, value)
class PeriodicCommandFixedDelay(PeriodicCommand):
"""
Like a periodic command, but don't calculate the delay based on
the current time. Instead use a fixed delay following the initial
run.
"""
@classmethod
def at_time(cls, at, delay, function):
if isinstance(at, int):
at = datetime.datetime.fromtimestamp(at)
cmd = cls.from_datetime(at)
if not isinstance(delay, datetime.timedelta):
delay = datetime.timedelta(seconds=delay)
cmd.delay = delay
cmd.function = function
return cmd
@classmethod
def daily_at(cls, at, function):
"""
Schedule a command to run at a specific time each day.
"""
daily = datetime.timedelta(days=1)
# convert when to the next datetime matching this time
when = datetime.datetime.combine(datetime.date.today(), at)
if when < cls.now():
when += daily
return cls.at_time(when, daily, function)
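if __name__ == '__main__':
    # Editor's sketch: commands are datetimes that carry their callback, so
    # a scheduler only needs to poll .due() and fire .function. The
    # one-second period here is an arbitrary choice for demonstration.
    import time
    def tick():
        print('tick')
    cmd = PeriodicCommand.after(1, tick)
    while not cmd.due():
        time.sleep(0.1)
    cmd.function()
    cmd = cmd.next()   # successor command, due one period later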
| gpl-2.0 |
sunlianqiang/kbengine | kbe/src/lib/python/Lib/re.py | 206 | 15262 | #
# Secret Labs' Regular Expression Engine
#
# re-compatible interface for the sre matching engine
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# This version of the SRE library can be redistributed under CNRI's
# Python 1.6 license. For any other use, please contact Secret Labs
# AB (info@pythonware.com).
#
# Portions of this engine have been developed in cooperation with
# CNRI. Hewlett-Packard provided funding for 1.6 integration and
# other compatibility work.
#
r"""Support for regular expressions (RE).
This module provides regular expression matching operations similar to
those found in Perl. It supports both 8-bit and Unicode strings; both
the pattern and the strings being processed can contain null bytes and
characters outside the US ASCII range.
Regular expressions can contain both special and ordinary characters.
Most ordinary characters, like "A", "a", or "0", are the simplest
regular expressions; they simply match themselves. You can
concatenate ordinary characters, so last matches the string 'last'.
The special characters are:
"." Matches any character except a newline.
"^" Matches the start of the string.
"$" Matches the end of the string or just before the newline at
the end of the string.
"*" Matches 0 or more (greedy) repetitions of the preceding RE.
Greedy means that it will match as many repetitions as possible.
"+" Matches 1 or more (greedy) repetitions of the preceding RE.
"?" Matches 0 or 1 (greedy) of the preceding RE.
*?,+?,?? Non-greedy versions of the previous three special characters.
{m,n} Matches from m to n repetitions of the preceding RE.
{m,n}? Non-greedy version of the above.
"\\" Either escapes special characters or signals a special sequence.
[] Indicates a set of characters.
A "^" as the first character indicates a complementing set.
"|" A|B, creates an RE that will match either A or B.
(...) Matches the RE inside the parentheses.
The contents can be retrieved or matched later in the string.
(?aiLmsux) Set the A, I, L, M, S, U, or X flag for the RE (see below).
(?:...) Non-grouping version of regular parentheses.
(?P<name>...) The substring matched by the group is accessible by name.
(?P=name) Matches the text matched earlier by the group named name.
(?#...) A comment; ignored.
(?=...) Matches if ... matches next, but doesn't consume the string.
(?!...) Matches if ... doesn't match next.
(?<=...) Matches if preceded by ... (must be fixed length).
(?<!...) Matches if not preceded by ... (must be fixed length).
(?(id/name)yes|no) Matches yes pattern if the group with id/name matched,
the (optional) no pattern otherwise.
The special sequences consist of "\\" and a character from the list
below. If the ordinary character is not on the list, then the
resulting RE will match the second character.
\number Matches the contents of the group of the same number.
\A Matches only at the start of the string.
\Z Matches only at the end of the string.
\b Matches the empty string, but only at the start or end of a word.
\B Matches the empty string, but not at the start or end of a word.
\d Matches any decimal digit; equivalent to the set [0-9] in
bytes patterns or string patterns with the ASCII flag.
In string patterns without the ASCII flag, it will match the whole
range of Unicode digits.
\D Matches any non-digit character; equivalent to [^\d].
\s Matches any whitespace character; equivalent to [ \t\n\r\f\v] in
bytes patterns or string patterns with the ASCII flag.
In string patterns without the ASCII flag, it will match the whole
range of Unicode whitespace characters.
\S Matches any non-whitespace character; equivalent to [^\s].
\w Matches any alphanumeric character; equivalent to [a-zA-Z0-9_]
in bytes patterns or string patterns with the ASCII flag.
In string patterns without the ASCII flag, it will match the
range of Unicode alphanumeric characters (letters plus digits
plus underscore).
With LOCALE, it will match the set [0-9_] plus characters defined
as letters for the current locale.
\W Matches the complement of \w.
\\ Matches a literal backslash.
This module exports the following functions:
match Match a regular expression pattern to the beginning of a string.
fullmatch Match a regular expression pattern to all of a string.
search Search a string for the presence of a pattern.
sub Substitute occurrences of a pattern found in a string.
subn Same as sub, but also return the number of substitutions made.
split Split a string by the occurrences of a pattern.
findall Find all occurrences of a pattern in a string.
finditer Return an iterator yielding a match object for each match.
compile Compile a pattern into a RegexObject.
purge Clear the regular expression cache.
escape Backslash all non-alphanumerics in a string.
Some of the functions in this module takes flags as optional parameters:
A ASCII For string patterns, make \w, \W, \b, \B, \d, \D
match the corresponding ASCII character categories
(rather than the whole Unicode categories, which is the
default).
For bytes patterns, this flag is the only available
behaviour and needn't be specified.
I IGNORECASE Perform case-insensitive matching.
L LOCALE Make \w, \W, \b, \B, dependent on the current locale.
M MULTILINE "^" matches the beginning of lines (after a newline)
as well as the string.
"$" matches the end of lines (before a newline) as well
as the end of the string.
S DOTALL "." matches any character at all, including the newline.
X VERBOSE Ignore whitespace and comments for nicer looking RE's.
U UNICODE For compatibility only. Ignored for string patterns (it
is the default), and forbidden for bytes patterns.
This module also defines an exception 'error'.
"""
import sys
import sre_compile
import sre_parse
# public symbols
__all__ = [ "match", "fullmatch", "search", "sub", "subn", "split", "findall",
"compile", "purge", "template", "escape", "A", "I", "L", "M", "S", "X",
"U", "ASCII", "IGNORECASE", "LOCALE", "MULTILINE", "DOTALL", "VERBOSE",
"UNICODE", "error" ]
__version__ = "2.2.1"
# flags
A = ASCII = sre_compile.SRE_FLAG_ASCII # assume ascii "locale"
I = IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE # ignore case
L = LOCALE = sre_compile.SRE_FLAG_LOCALE # assume current 8-bit locale
U = UNICODE = sre_compile.SRE_FLAG_UNICODE # assume unicode "locale"
M = MULTILINE = sre_compile.SRE_FLAG_MULTILINE # make anchors look for newline
S = DOTALL = sre_compile.SRE_FLAG_DOTALL # make dot match newline
X = VERBOSE = sre_compile.SRE_FLAG_VERBOSE # ignore whitespace and comments
# sre extensions (experimental, don't rely on these)
T = TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE # disable backtracking
DEBUG = sre_compile.SRE_FLAG_DEBUG # dump pattern after compilation
# sre exception
error = sre_compile.error
# --------------------------------------------------------------------
# public interface
def match(pattern, string, flags=0):
"""Try to apply the pattern at the start of the string, returning
a match object, or None if no match was found."""
return _compile(pattern, flags).match(string)
def fullmatch(pattern, string, flags=0):
"""Try to apply the pattern to all of the string, returning
a match object, or None if no match was found."""
return _compile(pattern, flags).fullmatch(string)
def search(pattern, string, flags=0):
"""Scan through string looking for a match to the pattern, returning
a match object, or None if no match was found."""
return _compile(pattern, flags).search(string)
def sub(pattern, repl, string, count=0, flags=0):
"""Return the string obtained by replacing the leftmost
non-overlapping occurrences of the pattern in string by the
replacement repl. repl can be either a string or a callable;
if a string, backslash escapes in it are processed. If it is
a callable, it's passed the match object and must return
a replacement string to be used."""
return _compile(pattern, flags).sub(repl, string, count)
def subn(pattern, repl, string, count=0, flags=0):
"""Return a 2-tuple containing (new_string, number).
new_string is the string obtained by replacing the leftmost
non-overlapping occurrences of the pattern in the source
string by the replacement repl. number is the number of
substitutions that were made. repl can be either a string or a
callable; if a string, backslash escapes in it are processed.
If it is a callable, it's passed the match object and must
return a replacement string to be used."""
return _compile(pattern, flags).subn(repl, string, count)
def split(pattern, string, maxsplit=0, flags=0):
"""Split the source string by the occurrences of the pattern,
returning a list containing the resulting substrings. If
capturing parentheses are used in pattern, then the text of all
groups in the pattern are also returned as part of the resulting
list. If maxsplit is nonzero, at most maxsplit splits occur,
and the remainder of the string is returned as the final element
of the list."""
return _compile(pattern, flags).split(string, maxsplit)
def findall(pattern, string, flags=0):
"""Return a list of all non-overlapping matches in the string.
If one or more capturing groups are present in the pattern, return
a list of groups; this will be a list of tuples if the pattern
has more than one group.
Empty matches are included in the result."""
return _compile(pattern, flags).findall(string)
if sys.hexversion >= 0x02020000:
__all__.append("finditer")
def finditer(pattern, string, flags=0):
"""Return an iterator over all non-overlapping matches in the
string. For each match, the iterator returns a match object.
Empty matches are included in the result."""
return _compile(pattern, flags).finditer(string)
def compile(pattern, flags=0):
"Compile a regular expression pattern, returning a pattern object."
return _compile(pattern, flags)
def purge():
"Clear the regular expression caches"
_cache.clear()
_cache_repl.clear()
def template(pattern, flags=0):
"Compile a template pattern, returning a pattern object"
return _compile(pattern, flags|T)
_alphanum_str = frozenset(
"_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
_alphanum_bytes = frozenset(
b"_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
def escape(pattern):
"""
Escape all the characters in pattern except ASCII letters, numbers and '_'.
"""
if isinstance(pattern, str):
alphanum = _alphanum_str
s = list(pattern)
for i, c in enumerate(pattern):
if c not in alphanum:
if c == "\000":
s[i] = "\\000"
else:
s[i] = "\\" + c
return "".join(s)
else:
alphanum = _alphanum_bytes
s = []
esc = ord(b"\\")
for c in pattern:
if c in alphanum:
s.append(c)
else:
if c == 0:
s.extend(b"\\000")
else:
s.append(esc)
s.append(c)
return bytes(s)
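# Example (illustrative): escape() backslashes every character outside the
# ASCII alphanumeric/underscore set, so arbitrary text can be embedded in a
# pattern literally:
#
#     >>> escape('a.b*c')
#     'a\\.b\\*c'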
# --------------------------------------------------------------------
# internals
_cache = {}
_cache_repl = {}
_pattern_type = type(sre_compile.compile("", 0))
_MAXCACHE = 512
def _compile(pattern, flags):
# internal: compile pattern
bypass_cache = flags & DEBUG
if not bypass_cache:
try:
return _cache[type(pattern), pattern, flags]
except KeyError:
pass
if isinstance(pattern, _pattern_type):
if flags:
raise ValueError(
"Cannot process flags argument with a compiled pattern")
return pattern
if not sre_compile.isstring(pattern):
raise TypeError("first argument must be string or compiled pattern")
p = sre_compile.compile(pattern, flags)
if not bypass_cache:
if len(_cache) >= _MAXCACHE:
_cache.clear()
_cache[type(pattern), pattern, flags] = p
return p
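# Cache behaviour (illustrative sketch): identical (type, pattern, flags)
# keys are served from _cache instead of being recompiled, unless the DEBUG
# flag bypasses the cache:
#
#     p1 = _compile(r'foo', 0)
#     p2 = _compile(r'foo', 0)
#     assert p1 is p2   # same cached pattern object
#     purge()           # empties _cache and _cache_repl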
def _compile_repl(repl, pattern):
# internal: compile replacement pattern
try:
return _cache_repl[repl, pattern]
except KeyError:
pass
p = sre_parse.parse_template(repl, pattern)
if len(_cache_repl) >= _MAXCACHE:
_cache_repl.clear()
_cache_repl[repl, pattern] = p
return p
def _expand(pattern, match, template):
# internal: match.expand implementation hook
template = sre_parse.parse_template(template, pattern)
return sre_parse.expand_template(template, match)
def _subx(pattern, template):
# internal: pattern.sub/subn implementation helper
template = _compile_repl(template, pattern)
if not template[0] and len(template[1]) == 1:
# literal replacement
return template[1][0]
def filter(match, template=template):
return sre_parse.expand_template(template, match)
return filter
# register myself for pickling
import copyreg
def _pickle(p):
return _compile, (p.pattern, p.flags)
copyreg.pickle(_pattern_type, _pickle, _compile)
# --------------------------------------------------------------------
# experimental stuff (see python-dev discussions for details)
class Scanner:
def __init__(self, lexicon, flags=0):
from sre_constants import BRANCH, SUBPATTERN
self.lexicon = lexicon
# combine phrases into a compound pattern
p = []
s = sre_parse.Pattern()
s.flags = flags
for phrase, action in lexicon:
p.append(sre_parse.SubPattern(s, [
(SUBPATTERN, (len(p)+1, sre_parse.parse(phrase, flags))),
]))
s.groups = len(p)+1
p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
self.scanner = sre_compile.compile(p)
def scan(self, string):
result = []
append = result.append
match = self.scanner.scanner(string).match
i = 0
while 1:
m = match()
if not m:
break
j = m.end()
if i == j:
break
action = self.lexicon[m.lastindex-1][1]
if callable(action):
self.match = m
action = action(self, m.group())
if action is not None:
append(action)
i = j
return result, string[i:]
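if __name__ == '__main__':
    # Illustrative sketch (not part of the original module): tokenize a tiny
    # arithmetic expression with the experimental Scanner. Callable actions
    # receive (scanner, matched_text); a None action drops the token.
    scanner = Scanner([
        (r'\d+', lambda s, tok: ('NUM', int(tok))),
        (r'[+*/-]', lambda s, tok: ('OP', tok)),
        (r'\s+', None),  # skip whitespace
    ])
    tokens, remainder = scanner.scan('1 + 22 * 3')
    print(tokens)           # [('NUM', 1), ('OP', '+'), ('NUM', 22), ('OP', '*'), ('NUM', 3)]
    print(repr(remainder))  # ''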
| lgpl-3.0 |
2013Commons/HUE-SHARK | desktop/core/ext-py/django_nose/build/lib.linux-i686-2.7/django_nose/nose_runner.py | 4 | 2660 | """
Django test runner that invokes nose.
Usage:
./manage.py test DJANGO_ARGS -- NOSE_ARGS
The 'test' argument, and any other args before '--', will not be passed
to nose, allowing django args and nose args to coexist.
You can use
NOSE_ARGS = ['list', 'of', 'args']
in settings.py for arguments that you always want passed to nose.
"""
import sys
from django.conf import settings
from django.db import connection
from django.test import utils
import nose
SETUP_ENV = 'setup_test_environment'
TEARDOWN_ENV = 'teardown_test_environment'
def get_test_environment_functions():
"""The functions setup_test_environment and teardown_test_environment in
<appname>.tests modules will be automatically called before and after
running the tests.
"""
setup_funcs = []
teardown_funcs = []
for app_name in settings.INSTALLED_APPS:
mod = __import__(app_name, None, None, ['tests'])
if hasattr(mod, 'tests'):
if hasattr(mod.tests, SETUP_ENV):
setup_funcs.append(getattr(mod.tests, SETUP_ENV))
if hasattr(mod.tests, TEARDOWN_ENV):
teardown_funcs.append(getattr(mod.tests, TEARDOWN_ENV))
return setup_funcs, teardown_funcs
def setup_test_environment(setup_funcs):
utils.setup_test_environment()
for func in setup_funcs:
func()
def teardown_test_environment(teardown_funcs):
utils.teardown_test_environment()
for func in teardown_funcs:
func()
def run_tests_explicit(nose_args, verbosity=1, interactive=True):
"""Setup django and run nose with given arguments."""
    setup_funcs, teardown_funcs = get_test_environment_functions()
# Prepare django for testing.
setup_test_environment(setup_funcs)
old_db_name = settings.DATABASE_NAME
connection.creation.create_test_db(verbosity, autoclobber=not interactive)
# Pretend it's a production environment.
settings.DEBUG = False
ret = nose.run(argv=nose_args)
# Clean up django.
connection.creation.destroy_test_db(old_db_name, verbosity)
teardown_test_environment(teardown_funcs)
return ret
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
"""Calculates nose arguments and runs tests."""
nose_argv = ['nosetests']
if hasattr(settings, 'NOSE_ARGS'):
nose_argv.extend(settings.NOSE_ARGS)
# Everything after '--' is passed to nose.
if '--' in sys.argv:
hyphen_pos = sys.argv.index('--')
nose_argv.extend(sys.argv[hyphen_pos + 1:])
if verbosity >= 1:
print ' '.join(nose_argv)
return run_tests_explicit(nose_argv, verbosity, interactive)
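# Programmatic use (illustrative, assumes a configured Django settings
# module): run one app's tests while passing extra arguments to nose, e.g.
#
#     run_tests_explicit(['nosetests', 'myapp.tests', '--verbosity=2'])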
| apache-2.0 |
kotton21/PotteryOnline | server.py | 1 | 2940 | #!/usr/bin/env python
import SimpleHTTPServer
import SocketServer
import PotGenerator
import os
import logging
import logging.handlers
import sys
import time
import rotate3D2
from urlparse import urlparse, parse_qs
try:
os.chdir('/home/pi/PotteryOnline/')
except:
    print 'executing from current directory'
PORT = 8080
# Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
class MySimpleHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
"""Serve a GET request."""
#generate the pot before the stuff
print "this is the path.............................."
print "path: %s"%self.path
print self.path == "/mythree.js/examples/my_webgl_loader_stl2.html"
if self.path.strip() == "/fig.jpg":
print "new fig request"
polyLimits = (-.1,.1,-.03,.03,-.0001,.0001)
g = PotGenerator.PolyPotGenerator(polyLimits)
print g.numCurves,': ',[round(c,2) for poly in g for c in poly]
g.plot(True)
if self.path == "/mythree.js/examples/my_webgl_loader_stl2.html":
print "new json pot request"
polyLimits = (-.1,.1,-.03,.03,-.0001,.0001)
g = PotGenerator.PolyPotGenerator(polyLimits)
print g.numCurves,': ',[round(c,2) for poly in g for c in poly]
#g.plot(True)
shape = g.zipPoints()
filename = "./mythree.js/examples/models/json/shape3d.json"
rotate3D2.build_3d_shape(shape, 20, filename)
f = self.send_head()
if f:
try:
#parse the query
query_components = parse_qs(urlparse(self.path).query)
#res = query_components["res"]
print 'components %s'%query_components
print urlparse(self.path).query
self.copyfile(f, self.wfile)
finally:
f.close()
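# Example (illustrative): a request such as /fig.jpg?res=300 reaches do_GET
# with urlparse(self.path).query == 'res=300', so parse_qs() yields
# {'res': ['300']} -- note that every value comes back as a list.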
#logging from http://blog.scphillips.com/posts/2013/07/getting-a-python-script-to-run-in-the-background-as-a-service-on-boot/
LOG_FILENAME = "./logs/server.log"
LOG_LEVEL = logging.INFO
logger = logging.getLogger(__name__)
logger.setLevel(LOG_LEVEL)
logHandler = logging.handlers.TimedRotatingFileHandler(LOG_FILENAME, when="midnight", backupCount=7)
formatter = logging.Formatter('%(asctime)s %(levelname)-8s %(message)s')
logHandler.setFormatter(formatter)
logger.addHandler(logHandler)
class MyLogger(object):
def __init__(self, logger, level):
"""Needs a logger and a Logger level."""
self.logger = logger
self.level = level
def write(self, message):
if message.rstrip() != "":
self.logger.log(self.level, message.rstrip())
sys.stdout = MyLogger(logger, logging.INFO)
sys.stderr = MyLogger(logger, logging.ERROR)
#end logging
#usage: logger.info("bla bla") or print "..."
logger.info("starting server")
Handler = MySimpleHTTPRequestHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)
logger.info("serving at port" + str(PORT))
print "serving at port", PORT
try:
httpd.serve_forever()
except KeyboardInterrupt:
httpd.server_close()
#import daemon
#from spam import do_main_program
#with daemon.DaemonContext():
# httpd.serve_forever()
| mit |
CloudWareChile/OpenChile | openerp/addons/l10n_fr/l10n_fr.py | 9 | 1966 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
class l10n_fr_report(osv.osv):
_name = 'l10n.fr.report'
_description = 'Report for l10n_fr'
_columns = {
'code': fields.char('Code', size=64),
'name': fields.char('Name', size=128),
'line_ids': fields.one2many('l10n.fr.line', 'report_id', 'Lines'),
}
_sql_constraints = [
        ('code_uniq', 'unique (code)', 'The report code must be unique!')
]
l10n_fr_report()
class l10n_fr_line(osv.osv):
_name = 'l10n.fr.line'
_description = 'Report Lines for l10n_fr'
_columns = {
'code': fields.char('Variable Name', size=64),
'definition': fields.char('Definition', size=512),
'name': fields.char('Name', size=256),
'report_id': fields.many2one('l10n.fr.report', 'Report'),
}
_sql_constraints = [
('code_uniq', 'unique (code)', 'The variable name must be unique !')
]
l10n_fr_line()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | agpl-3.0 |
xiaojunwu/crosswalk-test-suite | webapi/tct-netinfo-w3c-tests/inst.apk.py | 903 | 3180 | #!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARAMETERS = None
ADB_CMD = "adb"
def doCMD(cmd):
    # No need to handle timeouts in this short script; let the tool do it
print "-->> \"%s\"" % cmd
output = []
cmd_return_code = 1
cmd_proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
while True:
output_line = cmd_proc.stdout.readline().strip("\r\n")
cmd_return_code = cmd_proc.poll()
        if output_line == '' and cmd_return_code is not None:
break
sys.stdout.write("%s\n" % output_line)
sys.stdout.flush()
output.append(output_line)
return (cmd_return_code, output)
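# Example (illustrative): doCMD echoes the command, streams each output line
# to stdout, and returns the pair (return_code, output_lines):
#
#     (code, lines) = doCMD("%s devices" % ADB_CMD)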
def uninstPKGs():
action_status = True
for root, dirs, files in os.walk(SCRIPT_DIR):
for file in files:
if file.endswith(".apk"):
cmd = "%s -s %s uninstall org.xwalk.%s" % (
ADB_CMD, PARAMETERS.device, os.path.basename(os.path.splitext(file)[0]))
(return_code, output) = doCMD(cmd)
for line in output:
if "Failure" in line:
action_status = False
break
return action_status
def instPKGs():
action_status = True
for root, dirs, files in os.walk(SCRIPT_DIR):
for file in files:
if file.endswith(".apk"):
cmd = "%s -s %s install %s" % (ADB_CMD,
PARAMETERS.device, os.path.join(root, file))
(return_code, output) = doCMD(cmd)
for line in output:
if "Failure" in line:
action_status = False
break
return action_status
def main():
try:
usage = "usage: inst.py -i"
opts_parser = OptionParser(usage=usage)
opts_parser.add_option(
"-s", dest="device", action="store", help="Specify device")
opts_parser.add_option(
"-i", dest="binstpkg", action="store_true", help="Install package")
opts_parser.add_option(
"-u", dest="buninstpkg", action="store_true", help="Uninstall package")
global PARAMETERS
(PARAMETERS, args) = opts_parser.parse_args()
except Exception, e:
print "Got wrong option: %s, exit ..." % e
sys.exit(1)
if not PARAMETERS.device:
(return_code, output) = doCMD("adb devices")
for line in output:
if str.find(line, "\tdevice") != -1:
PARAMETERS.device = line.split("\t")[0]
break
if not PARAMETERS.device:
print "No device found"
sys.exit(1)
if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
print "-i and -u are conflict"
sys.exit(1)
if PARAMETERS.buninstpkg:
if not uninstPKGs():
sys.exit(1)
else:
if not instPKGs():
sys.exit(1)
if __name__ == "__main__":
main()
sys.exit(0)
| bsd-3-clause |
smishenk/blink-crosswalk | Tools/Scripts/webkitpy/formatter/main.py | 48 | 4102 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import lib2to3.refactor
from webkitpy.common.system.systemhost import SystemHost
from webkitpy.thirdparty import autopep8
def parse_args(args=None):
parser = argparse.ArgumentParser()
parser.add_argument('--chromium', action='store_const', dest='style', const='chromium', default='blink',
help="Format according to Chromium's Python coding styles instead of Blink's.")
parser.add_argument('--no-backups', action='store_false', default=True, dest='backup',
help='Do not back up files before overwriting them.')
parser.add_argument('-j', '--jobs', metavar='n', type=int, default=0,
help='Number of parallel jobs; match CPU count if less than 1.')
parser.add_argument('files', nargs='*', default=['-'],
help="files to format or '-' for standard in")
parser.add_argument('--double-quote-strings', action='store_const', dest='quoting', const='double', default='single',
help='Rewrite string literals to use double quotes instead of single quotes.')
parser.add_argument('--no-autopep8', action='store_true',
help='Skip the autopep8 code-formatting step.')
parser.add_argument('--leave-strings-alone', action='store_true',
help='Do not reformat string literals to use a consistent quote style.')
return parser.parse_args(args=args)
def main(host=None, args=None):
options = parse_args(args)
if options.no_autopep8:
options.style = None
if options.leave_strings_alone:
options.quoting = None
autopep8_options = _autopep8_options_for_style(options.style)
fixers = _fixers_for_quoting(options.quoting)
if options.files == ['-']:
host = host or SystemHost()
host.print_(reformat_source(host.stdin.read(), autopep8_options, fixers, '<stdin>'), end='')
return
# We create the arglist before checking if we need to create a Host, because a
# real host is non-picklable and can't be passed to host.executive.map().
arglist = [(host, name, autopep8_options, fixers, options.backup) for name in options.files]
host = host or SystemHost()
host.executive.map(_reformat_thunk, arglist, processes=options.jobs)
def _autopep8_options_for_style(style):
return {
None: [],
'blink': autopep8.parse_args(['--aggressive',
'--max-line-length', '132',
'--indent-size', '4',
'']),
'chromium': autopep8.parse_args(['--aggressive',
'--max-line-length', '80',
'--indent-size', '2',
'']),
}.get(style)
def _fixers_for_quoting(quoting):
return {
None: [],
'double': ['webkitpy.formatter.fix_double_quote_strings'],
'single': ['webkitpy.formatter.fix_single_quote_strings'],
}.get(quoting)
def _reformat_thunk(args):
reformat_file(*args)
def reformat_file(host, name, autopep8_options, fixers, should_backup_file):
host = host or SystemHost()
source = host.filesystem.read_text_file(name)
dest = reformat_source(source, autopep8_options, fixers, name)
if dest != source:
if should_backup_file:
host.filesystem.write_text_file(name + '.bak', source)
host.filesystem.write_text_file(name, dest)
def reformat_source(source, autopep8_options, fixers, name):
tmp_str = source
if autopep8_options:
tmp_str = autopep8.fix_code(tmp_str, autopep8_options)
if fixers:
tool = lib2to3.refactor.RefactoringTool(fixer_names=fixers,
explicit=fixers)
tmp_str = unicode(tool.refactor_string(tmp_str, name=name))
return tmp_str
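# Example (illustrative, assumes webkitpy's bundled autopep8 and the quote
# fixers are importable): reformat a snippet held in a string using the
# Blink style and double-quoted string literals:
#
#     out = reformat_source("x = 'a'\n",
#                           _autopep8_options_for_style('blink'),
#                           _fixers_for_quoting('double'),
#                           '<example>')
#     # expected: out == 'x = "a"\n'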
| bsd-3-clause |
ericholscher/merchant | billing/tests/google_checkout_tests.py | 2 | 27517 | from xml.dom.minidom import Document, parseString
from django.conf import settings
from django.test import TestCase
from django.template import Template, Context
from billing import get_integration
class GoogleCheckoutTestCase(TestCase):
def setUp(self):
self.gc = get_integration("google_checkout")
target_url_name = "example.com/offsite/my_content/"
target_url = 'http://' + target_url_name
fields = {
"items": [
{
"name": "name of the item",
"description": "Item description",
"amount": 0.00,
"id": "999AXZ",
"currency": "USD",
"quantity": 1,
"subscription": {
"type": "merchant", # valid choices is ["merchant", "google"]
"period": "YEARLY", # valid choices is ["DAILY", "WEEKLY", "SEMI_MONTHLY", "MONTHLY", "EVERY_TWO_MONTHS"," QUARTERLY", "YEARLY"]
"payments": [
{
"maximum-charge": 9.99, # Item amount must be "0.00"
"currency": "USD"
}
]
},
"digital-content": {
"display-disposition": "OPTIMISTIC", # valid choices is ['OPTIMISTIC', 'PESSIMISTIC']
"description": "Congratulations! Your subscription is being set up. Feel free to log onto &lt;a href='%s'&gt;%s&lt;/a&gt; and try it out!" % (target_url, target_url_name)
},
},
],
"return_url": "http://127.0.0.1:8000/offsite/google-checkout/",
'private_data': "test@example.com",
}
self.gc.add_fields(fields)
def testFormGen(self):
tmpl = Template("{% load render_integration from billing_tags %}{% render_integration obj %}")
form = tmpl.render(Context({"obj": self.gc}))
dom = parseString(form)
form_action_url = dom.getElementsByTagName('form')[0].attributes['action'].value
input_image_src = dom.getElementsByTagName('input')[2].attributes['src'].value
expected_form_action_url = "https://sandbox.google.com/checkout/api/checkout/v2/checkout/Merchant/%s" % settings.MERCHANT_SETTINGS['google_checkout']['MERCHANT_ID']
expected_input_image_src = "https://sandbox.google.com/checkout/buttons/checkout.gif?merchant_id=%s&w=180&h=46&style=white&variant=text&loc=en_US" % settings.MERCHANT_SETTINGS['google_checkout']['MERCHANT_ID']
self.assertEquals(form_action_url, expected_form_action_url)
self.assertEquals(input_image_src, expected_input_image_src)
def testBuildXML(self):
xml = self.gc.build_xml()
good_xml = """<?xml version="1.0" encoding="utf-8"?><checkout-shopping-cart xmlns="http://checkout.google.com/schema/2"><shopping-cart><items><item><item-name>name of the item</item-name><item-description>Item description</item-description><unit-price currency="USD">0.0</unit-price><quantity>1</quantity><merchant-item-id>999AXZ</merchant-item-id><subscription period="YEARLY" type="merchant"><payments><subscription-payment><maximum-charge currency="USD">9.99</maximum-charge></subscription-payment></payments></subscription><digital-content><display-disposition>OPTIMISTIC</display-disposition><description>Congratulations! Your subscription is being set up. Feel free to log onto &amp;lt;a href=\'http://example.com/offsite/my_content/\'&amp;gt;example.com/offsite/my_content/&amp;lt;/a&amp;gt; and try it out!</description></digital-content></item></items><merchant-private-data>test@example.com</merchant-private-data></shopping-cart><checkout-flow-support><merchant-checkout-flow-support><continue-shopping-url>http://127.0.0.1:8000/offsite/google-checkout/</continue-shopping-url></merchant-checkout-flow-support></checkout-flow-support></checkout-shopping-cart>"""
self.assertEquals(xml, good_xml)
class GoogleCheckoutShippingTestCase(TestCase):
def setUp(self):
self.gc = get_integration("google_checkout")
self.maxDiff = None
def testAddNodes(self):
doc = Document()
parent_node = doc.createElement('parent_node')
doc.appendChild(parent_node)
child_node_values = ['child1', 'child2', 'child3']
self.gc._add_nodes(doc, parent_node, 'child_node','child_sub_node', child_node_values)
xml1 = "<parent_node><child_node><child_sub_node>child1</child_sub_node></child_node>\
<child_node><child_sub_node>child2</child_sub_node></child_node>\
<child_node><child_sub_node>child3</child_sub_node></child_node></parent_node>"
doc_good = parseString(xml1)
self.assertEquals(doc.toxml(), doc_good.toxml())
def testAddNodes_novalues(self):
doc = Document()
parent_node = doc.createElement('parent_node')
doc.appendChild(parent_node)
child_node_values = []
self.gc._add_nodes(doc, parent_node, 'child_node','child_sub_node', child_node_values)
xml1 = """<parent_node></parent_node>"""
doc_good = parseString(xml1)
self.assertEquals(doc.toprettyxml(), doc_good.toprettyxml())
def testShippingExclude(self):
doc = Document()
parent_node = doc.createElement('parent_node')
doc.appendChild(parent_node)
data = {
'us-state-area': ['AK','HI'],
'us-zip-area': ['90210', '04005', '04092'],
'us-country-area': 'CONTINENTAL_48',
'world-area': True,
'postal-area': [{
'country-code': 'US',
'postal-code-pattern': ['94043', '90211'],
},
],
}
self.gc._shipping_allowed_excluded(doc, parent_node, data)
xml1 = "<parent_node><us-state-area><state>AK</state></us-state-area>\
<us-state-area><state>HI</state></us-state-area>\
<us-zip-area><zip-pattern>90210</zip-pattern></us-zip-area>\
<us-zip-area><zip-pattern>04005</zip-pattern></us-zip-area>\
<us-zip-area><zip-pattern>04092</zip-pattern></us-zip-area>\
<us-country-area country-area='CONTINENTAL_48'/>\
<world-area/>\
<postal-area><country-code>US</country-code>\
<postal-code-pattern>94043</postal-code-pattern>\
<postal-code-pattern>90211</postal-code-pattern></postal-area>\
</parent_node>"
doc_good = parseString(xml1)
self.assertEquals(doc.toxml(), doc_good.toxml())
def testShippingRestrictions(self):
""" Not a real data since you would never put the these values for
exclude and include, but wanted to test everything on both sides
should work the same for both allowed and excluded"""
doc = Document()
parent_node = doc.createElement('parent_node')
doc.appendChild(parent_node)
data = {
'allowed-areas': {
'us-state-area': ['ME','NH'],
'us-zip-area': ['04005', '04092'],
'us-country-area': 'ALL',
'world-area': True,
'postal-area': [{
'country-code': 'US',
'postal-code-pattern': ['94043', '90211'],
},
],
},
'excluded-areas': {
'us-state-area': ['AK','HI'],
'us-zip-area': ['90210'],
'us-country-area': 'CONTINENTAL_48',
'world-area': False,
'postal-area': [{
'country-code': 'US',
'postal-code-pattern': ['11111', '11112'],
},
],
},
}
self.gc._shipping_restrictions_filters(doc, parent_node, data)
xml1 = "<parent_node><allowed-areas><us-state-area>\
<state>ME</state></us-state-area>\
<us-state-area><state>NH</state></us-state-area>\
<us-zip-area><zip-pattern>04005</zip-pattern></us-zip-area>\
<us-zip-area><zip-pattern>04092</zip-pattern></us-zip-area>\
<us-country-area country-area='ALL'/>\
<world-area/>\
<postal-area><country-code>US</country-code>\
<postal-code-pattern>94043</postal-code-pattern>\
<postal-code-pattern>90211</postal-code-pattern></postal-area>\
</allowed-areas>\
<excluded-areas><us-state-area><state>AK</state></us-state-area>\
<us-state-area><state>HI</state></us-state-area>\
<us-zip-area><zip-pattern>90210</zip-pattern></us-zip-area>\
<us-country-area country-area='CONTINENTAL_48'/>\
<postal-area><country-code>US</country-code>\
<postal-code-pattern>11111</postal-code-pattern>\
<postal-code-pattern>11112</postal-code-pattern></postal-area>\
</excluded-areas></parent_node>"
doc_good = parseString(xml1)
self.assertEquals(doc.toxml(), doc_good.toxml())
def testFullCartXML(self):
fields = {"items": [{
"name": "name of the item",
"description": "Item description",
"amount": 1,
"id": "999AXZ",
"currency": "USD",
"quantity": 1,
}],
'shipping-methods': [
{'shipping_type':'flat-rate-shipping',
'name':"UPS Next Day Air",
'currency':"USD",
'price':20.00,
'shipping-restrictions': {
'allow-us-po-box': False,
'excluded-areas': {
'us-state-area' : ['AK', 'HI']
}
}
},
{'shipping_type':'flat-rate-shipping',
'name':"UPS Ground",
'currency':"USD",
'price':15.00,
'shipping-restrictions': {
'allow-us-po-box': False,
}
},
],
"return_url": "http://127.0.0.1:8000/offsite/google-checkout/",
}
self.gc.add_fields(fields)
xml = self.gc.build_xml()
good_xml = """<?xml version="1.0" encoding="utf-8"?><checkout-shopping-cart xmlns="http://checkout.google.com/schema/2"><shopping-cart><items><item><item-name>name of the item</item-name><item-description>Item description</item-description><unit-price currency="USD">1</unit-price><quantity>1</quantity><merchant-item-id>999AXZ</merchant-item-id></item></items><merchant-private-data></merchant-private-data></shopping-cart><checkout-flow-support><merchant-checkout-flow-support><continue-shopping-url>http://127.0.0.1:8000/offsite/google-checkout/</continue-shopping-url><shipping-methods><flat-rate-shipping name="UPS Next Day Air"><price currency="USD">20.0</price><shipping-restrictions><allow-us-po-box>false</allow-us-po-box><excluded-areas><us-state-area><state>AK</state></us-state-area><us-state-area><state>HI</state></us-state-area></excluded-areas></shipping-restrictions></flat-rate-shipping><flat-rate-shipping name="UPS Ground"><price currency="USD">15.0</price><shipping-restrictions><allow-us-po-box>false</allow-us-po-box></shipping-restrictions></flat-rate-shipping></shipping-methods></merchant-checkout-flow-support></checkout-flow-support></checkout-shopping-cart>"""
self.assertEquals(xml, good_xml)
class GoogleCheckoutTaxTestCase(TestCase):
""" Test the tax code """
def setUp(self):
self.gc = get_integration("google_checkout")
self.maxDiff = None
def testTaxes1(self):
doc = Document()
parent_node = doc.createElement('parent_node')
doc.appendChild(parent_node)
data = {
'default-tax-table': {
'tax-rules': [
{
'shipping-taxed': True,
'rate': 0.06,
'tax-area': {
'us-state-area': ['CT'],
}
}
]
}
}
self.gc._taxes(doc, parent_node, data)
xml1 = "<parent_node><tax-tables><default-tax-table><tax-rules>\
<default-tax-rule><shipping-taxed>true</shipping-taxed><rate>0.06</rate>\
<tax-area><us-state-area><state>CT</state></us-state-area></tax-area>\
</default-tax-rule></tax-rules></default-tax-table></tax-tables>\
</parent_node>"
doc_good = parseString(xml1)
self.assertEquals(doc.toxml(), doc_good.toxml())
def testTaxes2(self):
doc = Document()
parent_node = doc.createElement('parent_node')
doc.appendChild(parent_node)
data = {
'default-tax-table': {
'tax-rules': [
{
'shipping-taxed': True,
'rate': 0.06,
'tax-area': {
'us-state-area': ['CT'],
}
},
{
'rate': 0.05,
'tax-area': {
'us-state-area': ['MD'],
}
}
]
}
}
self.gc._taxes(doc, parent_node, data)
xml1 = "<parent_node><tax-tables><default-tax-table><tax-rules>\
<default-tax-rule><shipping-taxed>true</shipping-taxed><rate>0.06</rate>\
<tax-area><us-state-area><state>CT</state></us-state-area></tax-area>\
</default-tax-rule><default-tax-rule><shipping-taxed>false</shipping-taxed>\
<rate>0.05</rate><tax-area><us-state-area><state>MD</state></us-state-area>\
</tax-area></default-tax-rule></tax-rules></default-tax-table></tax-tables>\
</parent_node>"
doc_good = parseString(xml1)
self.assertEquals(doc.toxml(), doc_good.toxml())
def testTaxes3(self):
doc = Document()
parent_node = doc.createElement('parent_node')
doc.appendChild(parent_node)
data = {
'default-tax-table': {
'tax-rules': [
{
'shipping-taxed': False,
'rate': 0.08375,
'tax-area': {
'us-zip-area': ['100*'],
}
},
{
'shipping-taxed': True,
'rate': 0.04,
'tax-area': {
'us-state-area': ['NY'],
}
}
]
}
}
self.gc._taxes(doc, parent_node, data)
xml1 = "<parent_node><tax-tables><default-tax-table>\
<tax-rules><default-tax-rule><shipping-taxed>false</shipping-taxed>\
<rate>0.08375</rate><tax-area><us-zip-area><zip-pattern>100*</zip-pattern>\
</us-zip-area></tax-area></default-tax-rule>\
<default-tax-rule><shipping-taxed>true</shipping-taxed>\
<rate>0.04</rate><tax-area><us-state-area><state>NY</state></us-state-area>\
</tax-area></default-tax-rule>\
</tax-rules></default-tax-table></tax-tables></parent_node>"
doc_good = parseString(xml1)
self.assertEquals(doc.toxml(), doc_good.toxml())
def testTaxes4(self):
doc = Document()
parent_node = doc.createElement('parent_node')
doc.appendChild(parent_node)
data = {
'default-tax-table': {
'tax-rules': [
{
'shipping-taxed': False,
'rate': 0.08375,
'tax-area': {
'us-zip-area': ['100*', '040*'],
}
},
{
'shipping-taxed': True,
'rate': 0.04,
'tax-area': {
'us-state-area': ['NY', 'ME'],
}
}
]
}
}
self.gc._taxes(doc, parent_node, data)
xml1 = "<parent_node><tax-tables><default-tax-table>\
<tax-rules><default-tax-rule><shipping-taxed>false</shipping-taxed>\
<rate>0.08375</rate><tax-areas><us-zip-area><zip-pattern>100*</zip-pattern>\
</us-zip-area><us-zip-area><zip-pattern>040*</zip-pattern>\
</us-zip-area></tax-areas></default-tax-rule>\
<default-tax-rule><shipping-taxed>true</shipping-taxed>\
<rate>0.04</rate><tax-areas><us-state-area><state>NY</state></us-state-area>\
<us-state-area><state>ME</state></us-state-area>\
</tax-areas></default-tax-rule>\
</tax-rules></default-tax-table></tax-tables></parent_node>"
doc_good = parseString(xml1)
self.assertEquals(doc.toxml(), doc_good.toxml())
def testTaxes5(self):
doc = Document()
parent_node = doc.createElement('parent_node')
doc.appendChild(parent_node)
data = {
'default-tax-table': {
'tax-rules': [
{
'shipping-taxed': True,
'rate': 0.06,
'tax-area': {
'us-state-area': ['CT'],
}
},
{
'rate': 0.05,
'tax-area': {
'us-state-area': ['MD'],
}
}
]
},
'alternate-tax-tables': [
{'name': 'bicycle_helmets',
'standalone': False,
'alternative-tax-rules': [
{ 'rate': 0,
'tax-area': {
'us-state-area': ['CT'],
}
}
]
}
]
}
self.gc._taxes(doc, parent_node, data)
xml1 = "<parent_node><tax-tables>\
<default-tax-table><tax-rules><default-tax-rule>\
<shipping-taxed>true</shipping-taxed><rate>0.06</rate>\
<tax-area><us-state-area><state>CT</state></us-state-area>\
</tax-area></default-tax-rule><default-tax-rule>\
<shipping-taxed>false</shipping-taxed><rate>0.05</rate>\
<tax-area><us-state-area><state>MD</state></us-state-area>\
</tax-area></default-tax-rule></tax-rules></default-tax-table>\
<alternate-tax-tables><alternate-tax-table name='bicycle_helmets' standalone='false'>\
<alternate-tax-rules><alternate-tax-rule><rate>0</rate>\
<tax-area><us-state-area><state>CT</state></us-state-area></tax-area>\
</alternate-tax-rule></alternate-tax-rules></alternate-tax-table>\
</alternate-tax-tables></tax-tables></parent_node>"
doc_good = parseString(xml1)
self.assertEquals(doc.toxml(), doc_good.toxml())
def testTaxes6(self):
doc = Document()
parent_node = doc.createElement('parent_node')
doc.appendChild(parent_node)
data = {
'default-tax-table': {
'tax-rules': [
{
'shipping-taxed': True,
'rate': 0.06,
'tax-area': {
'us-state-area': ['CT'],
}
},
{
'rate': 0.05,
'tax-area': {
'us-state-area': ['MD'],
}
}
]
},
'alternate-tax-tables': [
{'name': 'tax_exempt',
'standalone': True,
}
]
}
self.gc._taxes(doc, parent_node, data)
xml1 = "<parent_node><tax-tables>\
<default-tax-table><tax-rules><default-tax-rule>\
<shipping-taxed>true</shipping-taxed><rate>0.06</rate>\
<tax-area><us-state-area><state>CT</state></us-state-area>\
</tax-area></default-tax-rule><default-tax-rule>\
<shipping-taxed>false</shipping-taxed><rate>0.05</rate>\
<tax-area><us-state-area><state>MD</state></us-state-area>\
</tax-area></default-tax-rule></tax-rules></default-tax-table>\
<alternate-tax-tables><alternate-tax-table name='tax_exempt' standalone='true'>\
<alternate-tax-rules/></alternate-tax-table>\
</alternate-tax-tables></tax-tables></parent_node>"
doc_good = parseString(xml1)
self.assertEquals(doc.toxml(), doc_good.toxml())
def testTaxes7(self):
doc = Document()
parent_node = doc.createElement('parent_node')
doc.appendChild(parent_node)
data = {
'default-tax-table': {
'tax-rules': [
{
'shipping-taxed': True,
'rate': 0.175,
'tax-area': {
'postal-area': [
{'country-code': 'DE'},
{'country-code': 'ES'},
{'country-code': 'GB'},
],
},
},
]
},
}
self.gc._taxes(doc, parent_node, data)
xml1 = "<parent_node><tax-tables>\
<default-tax-table><tax-rules><default-tax-rule>\
<shipping-taxed>true</shipping-taxed><rate>0.175</rate>\
<tax-areas><postal-area><country-code>DE</country-code>\
</postal-area><postal-area><country-code>ES</country-code>\
</postal-area><postal-area><country-code>GB</country-code>\
</postal-area></tax-areas></default-tax-rule></tax-rules>\
</default-tax-table></tax-tables></parent_node>"
doc_good = parseString(xml1)
self.assertEquals(doc.toxml(), doc_good.toxml())
def testTaxes8(self):
doc = Document()
parent_node = doc.createElement('parent_node')
doc.appendChild(parent_node)
data = {
'default-tax-table': {
'tax-rules': [
{
'shipping-taxed': True,
'rate': 0.175,
'tax-area': {
'world-area': True,
},
},
]
},
'alternate-tax-tables': [
{'name': 'reduced',
'standalone': True,
'alternative-tax-rules': [
{ 'rate': 0.05,
'tax-area': {
'world-area': True,
}
},
]
},
{ 'name': 'tax_exempt',
'standalone': True,
}
]
}
self.gc._taxes(doc, parent_node, data)
xml1 = "<parent_node><tax-tables>\
<default-tax-table><tax-rules>\
<default-tax-rule><shipping-taxed>true</shipping-taxed>\
<rate>0.175</rate><tax-area><world-area/></tax-area>\
</default-tax-rule></tax-rules></default-tax-table>\
<alternate-tax-tables><alternate-tax-table name='reduced' standalone='true'>\
<alternate-tax-rules><alternate-tax-rule><rate>0.05</rate><tax-area>\
<world-area/></tax-area></alternate-tax-rule></alternate-tax-rules>\
</alternate-tax-table><alternate-tax-table standalone='true' name='tax_exempt'>\
<alternate-tax-rules/></alternate-tax-table></alternate-tax-tables>\
</tax-tables></parent_node>"
doc_good = parseString(xml1)
self.assertEquals(doc.toxml(), doc_good.toxml())
def testFullCartXML(self):
fields = {"items": [{
"name": "name of the item",
"description": "Item description",
"amount": 1,
"id": "999AXZ",
"currency": "USD",
"quantity": 1,
},
{
"name": "tax free item",
"description": "Item description",
"amount": 2,
"id": "999AXZ",
"currency": "USD",
"quantity": 1,
"tax-table-selector": 'tax_exempt',
},
],
'tax-tables': {
'default-tax-table': {
'tax-rules': [
{
'shipping-taxed': False,
'rate': 0.08375,
'tax-area': {
'us-zip-area': ['100*'],
}
},
{
'shipping-taxed': True,
'rate': 0.04,
'tax-area': {
'us-state-area': ['NY'],
}
}
]
},
'alternate-tax-tables': [
{
'name': 'tax_exempt',
'standalone': True,
}
]
},
"return_url": "http://127.0.0.1:8000/offsite/google-checkout/",
}
self.gc.add_fields(fields)
xml = self.gc.build_xml()
good_xml = """<?xml version="1.0" encoding="utf-8"?><checkout-shopping-cart xmlns="http://checkout.google.com/schema/2"><shopping-cart><items><item><item-name>name of the item</item-name><item-description>Item description</item-description><unit-price currency="USD">1</unit-price><quantity>1</quantity><merchant-item-id>999AXZ</merchant-item-id></item><item><item-name>tax free item</item-name><item-description>Item description</item-description><unit-price currency="USD">2</unit-price><quantity>1</quantity><merchant-item-id>999AXZ</merchant-item-id><tax-table-selector>tax_exempt</tax-table-selector></item></items><merchant-private-data></merchant-private-data></shopping-cart><checkout-flow-support><merchant-checkout-flow-support><continue-shopping-url>http://127.0.0.1:8000/offsite/google-checkout/</continue-shopping-url><tax-tables><default-tax-table><tax-rules><default-tax-rule><shipping-taxed>false</shipping-taxed><rate>0.08375</rate><tax-area><us-zip-area><zip-pattern>100*</zip-pattern></us-zip-area></tax-area></default-tax-rule><default-tax-rule><shipping-taxed>true</shipping-taxed><rate>0.04</rate><tax-area><us-state-area><state>NY</state></us-state-area></tax-area></default-tax-rule></tax-rules></default-tax-table><alternate-tax-tables><alternate-tax-table name="tax_exempt" standalone="true"><alternate-tax-rules/></alternate-tax-table></alternate-tax-tables></tax-tables></merchant-checkout-flow-support></checkout-flow-support></checkout-shopping-cart>"""
self.assertEquals(xml, good_xml)
| bsd-3-clause |
yv84/pyph | src/tests/integrate_tests/tcp_echo.py | 1 | 5366 | #!/usr/bin/env python3
"""TCP echo server example."""
import argparse
import asyncio
import sys
import os
try:
import signal
except ImportError:
signal = None
from msg_log import Message
from game_log import log
ARGS = argparse.ArgumentParser(description="PyPh test fixtures.")
ARGS.add_argument(
'--server', action="store_true", dest='server',
default=False, help='Run tcp server')
ARGS.add_argument(
'--client', action="store_true", dest='client',
default=False, help='Run tcp client')
ARGS.add_argument(
'--host', action="store", dest='host',
default='127.0.0.1', help='Host name')
ARGS.add_argument(
'--port', action="store", dest='port',
default=9999, type=int, help='Port number')
ARGS.add_argument(
'--iocp', action="store_true", dest='iocp',
default=False, help='Use IOCP event loop')
ARGS.add_argument("--game", dest='game', type=str, required=False,
help='aa || l2', default='aa')
ARGS.add_argument("--l2_chronicle", dest='l2_chronicle', type=str, required=False,
help='so many options', default='gracia_final')
args = ARGS.parse_args()
# ---------------------------------------
if args.game == '_l2':
f = os.path.join(os.path.dirname(__file__), 'fixtures/l2', 'game_log_with_xor_len.log')
pattern = {'c': b'client:', 's': b'server:', 'start': 10, 'end': -2}
if args.game == 'l2':
f = os.path.join(os.path.dirname(__file__), 'fixtures/l2', 'game_log_15122012_with_pck_len.log')
pattern = {'c': b'client:', 's': b'server:', 'start': 10, 'end': -2}
elif args.game == 'aa':
f = os.path.join(os.path.dirname(__file__), 'fixtures/aa', 'game_1.log')
pattern = {'c': b"c->", 's': b"s->", 'start': 3, 'end': -2}
# ---------------------------------------
log = Message.get_log_from_file(f, pattern)
log, side_log = Message.game_log_from_import(log)
print(log, side_log)
class EchoServer(asyncio.Protocol):
TIMEOUT = 5.0
message_server = {}
def timeout(self):
print('connection timeout, closing.')
self.transport.close()
def connection_made(self, transport):
print('connection made')
self.transport = transport
self.message_server[self.transport] = Message('server', log=log, side_log=side_log)
# start 5 seconds timeout timer
self.h_timeout = asyncio.get_event_loop().call_later(
self.TIMEOUT, self.timeout)
def data_received(self, data):
#print('data received: ', data.decode())
#print('S: ', data)
#self.transport.write(b'Re: ' + data)
data = b''.join(self.message_server[self.transport](data))
if data:
self.transport.write(data)
else:
self.transport.close()
#print('S send: ', b''.join(self.message_server[self.transport](data)))
# restart timeout timer
self.h_timeout.cancel()
self.h_timeout = asyncio.get_event_loop().call_later(
self.TIMEOUT, self.timeout)
def eof_received(self):
pass
def connection_lost(self, exc):
print('connection lost:', exc)
self.h_timeout.cancel()
class EchoClient(asyncio.Protocol):
message = 'This is the message. It will be echoed.'
message_client = Message('client', log=log, side_log=side_log)
def connection_made(self, transport):
self.transport = transport
#print(b''.join(self.message_client(b'')))
self.transport.write(b''.join(self.message_client(b'')))
#self.transport.write(self.message.encode())
#print('data sent:', self.message)
def data_received(self, data):
#print('C:', data)
data = b''.join(self.message_client(data))
if data:
self.transport.write(data)
else:
self.transport.close()
# disconnect after 10 seconds
asyncio.get_event_loop().call_later(10.0, self.transport.close)
def eof_received(self):
pass
def connection_lost(self, exc):
print('connection lost:', exc)
asyncio.get_event_loop().stop()
def start_client(loop, host, port):
t = asyncio.Task(loop.create_connection(EchoClient, host, port))
loop.run_until_complete(t)
def start_server(loop, host, port):
f = loop.create_server(EchoServer, host, port)
return loop.run_until_complete(f)
if __name__ == '__main__':
if ':' in args.host:
args.host, port = args.host.split(':', 1)
args.port = int(port)
if (not (args.server or args.client)) or (args.server and args.client):
print('Please specify --server or --client\n')
ARGS.print_help()
else:
if args.iocp:
from asyncio import windows_events
loop = windows_events.ProactorEventLoop()
asyncio.set_event_loop(loop)
else:
loop = asyncio.get_event_loop()
print ('Using backend: {0}'.format(loop.__class__.__name__))
if signal is not None and sys.platform != 'win32':
loop.add_signal_handler(signal.SIGINT, loop.stop)
if args.server:
server = start_server(loop, args.host, args.port)
else:
start_client(loop, args.host, args.port)
try:
loop.run_forever()
finally:
if args.server:
server.close()
loop.close()
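# Example invocations (illustrative):
#
#     python tcp_echo.py --server --host 127.0.0.1 --port 9999 --game aa
#     python tcp_echo.py --client --host 127.0.0.1:9999 --game aa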
| mit |
dims/glance | glance/api/v2/model/metadef_namespace.py | 20 | 3021 | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import wsme
from wsme.rest import json
from wsme import types
from glance.api.v2.model.metadef_object import MetadefObject
from glance.api.v2.model.metadef_property_type import PropertyType
from glance.api.v2.model.metadef_resource_type import ResourceTypeAssociation
from glance.api.v2.model.metadef_tag import MetadefTag
from glance.common.wsme_utils import WSMEModelTransformer
class Namespace(types.Base, WSMEModelTransformer):
# Base fields
namespace = wsme.wsattr(types.text, mandatory=True)
display_name = wsme.wsattr(types.text, mandatory=False)
description = wsme.wsattr(types.text, mandatory=False)
visibility = wsme.wsattr(types.text, mandatory=False)
protected = wsme.wsattr(bool, mandatory=False)
owner = wsme.wsattr(types.text, mandatory=False)
# Not using datetime since time format has to be
# in oslo_utils.timeutils.isotime() format
created_at = wsme.wsattr(types.text, mandatory=False)
updated_at = wsme.wsattr(types.text, mandatory=False)
# Contained fields
resource_type_associations = wsme.wsattr([ResourceTypeAssociation],
mandatory=False)
properties = wsme.wsattr({types.text: PropertyType}, mandatory=False)
objects = wsme.wsattr([MetadefObject], mandatory=False)
tags = wsme.wsattr([MetadefTag], mandatory=False)
# Generated fields
self = wsme.wsattr(types.text, mandatory=False)
schema = wsme.wsattr(types.text, mandatory=False)
    def __init__(self, **kwargs):
        super(Namespace, self).__init__(**kwargs)
@staticmethod
def to_model_properties(db_property_types):
property_types = {}
for db_property_type in db_property_types:
# Convert the persisted json schema to a dict of PropertyTypes
property_type = json.fromjson(
PropertyType, db_property_type.schema)
property_type_name = db_property_type.name
property_types[property_type_name] = property_type
return property_types
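    # Example (illustrative, with a hypothetical row type): each DB row's
    # ``schema`` column holds the persisted JSON for one PropertyType and
    # ``name`` supplies the key in the returned dict:
    #
    #     Row = collections.namedtuple('Row', 'name schema')
    #     Namespace.to_model_properties(
    #         [Row('cores', '{"type": "integer"}')])
    #     # -> {'cores': <PropertyType instance>}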
class Namespaces(types.Base, WSMEModelTransformer):
namespaces = wsme.wsattr([Namespace], mandatory=False)
# Pagination
next = wsme.wsattr(types.text, mandatory=False)
schema = wsme.wsattr(types.text, mandatory=True)
first = wsme.wsattr(types.text, mandatory=True)
def __init__(self, **kwargs):
super(Namespaces, self).__init__(**kwargs)
| apache-2.0 |
sdphome/UHF_Reader | u-boot-2015.04/test/image/test-fit.py | 3 | 12492 | #!/usr/bin/python
#
# Copyright (c) 2013, Google Inc.
#
# Sanity check of the FIT handling in U-Boot
#
# SPDX-License-Identifier: GPL-2.0+
#
# To run this:
#
# make O=sandbox sandbox_config
# make O=sandbox
# ./test/image/test-fit.py -u sandbox/u-boot
import doctest
from optparse import OptionParser
import os
import shutil
import struct
import sys
import tempfile
# Enable printing of all U-Boot output
DEBUG = True
# The 'command' library in patman is convenient for running commands
base_path = os.path.dirname(sys.argv[0])
patman = os.path.join(base_path, '../../tools/patman')
sys.path.append(patman)
import command
# Define a base ITS which we can adjust using % and a dictionary
base_its = '''
/dts-v1/;
/ {
description = "Chrome OS kernel image with one or more FDT blobs";
#address-cells = <1>;
images {
kernel@1 {
data = /incbin/("%(kernel)s");
type = "kernel";
arch = "sandbox";
os = "linux";
compression = "none";
load = <0x40000>;
entry = <0x8>;
};
fdt@1 {
description = "snow";
data = /incbin/("u-boot.dtb");
type = "flat_dt";
arch = "sandbox";
%(fdt_load)s
compression = "none";
signature@1 {
algo = "sha1,rsa2048";
key-name-hint = "dev";
};
};
ramdisk@1 {
description = "snow";
data = /incbin/("%(ramdisk)s");
type = "ramdisk";
arch = "sandbox";
os = "linux";
%(ramdisk_load)s
compression = "none";
};
};
configurations {
default = "conf@1";
conf@1 {
kernel = "kernel@1";
fdt = "fdt@1";
%(ramdisk_config)s
};
};
};
'''
# Define a base FDT - currently we don't use anything in this
base_fdt = '''
/dts-v1/;
/ {
model = "Sandbox Verified Boot Test";
compatible = "sandbox";
};
'''
# This is the U-Boot script that is run for each test. First load the fit,
# then do the 'bootm' command, then save out memory from the places where
# we expect 'bootm' to write things. Then quit.
base_script = '''
sb load hostfs 0 %(fit_addr)x %(fit)s
fdt addr %(fit_addr)x
bootm start %(fit_addr)x
bootm loados
sb save hostfs 0 %(kernel_addr)x %(kernel_out)s %(kernel_size)x
sb save hostfs 0 %(fdt_addr)x %(fdt_out)s %(fdt_size)x
sb save hostfs 0 %(ramdisk_addr)x %(ramdisk_out)s %(ramdisk_size)x
reset
'''
def debug_stdout(stdout):
if DEBUG:
print stdout
def make_fname(leaf):
"""Make a temporary filename
Args:
leaf: Leaf name of file to create (within temporary directory)
Return:
Temporary filename
"""
global base_dir
return os.path.join(base_dir, leaf)
def filesize(fname):
"""Get the size of a file
Args:
fname: Filename to check
Return:
Size of file in bytes
"""
return os.stat(fname).st_size
def read_file(fname):
"""Read the contents of a file
Args:
fname: Filename to read
Returns:
Contents of file as a string
"""
with open(fname, 'r') as fd:
return fd.read()
def make_dtb():
"""Make a sample .dts file and compile it to a .dtb
Returns:
Filename of .dtb file created
"""
src = make_fname('u-boot.dts')
dtb = make_fname('u-boot.dtb')
with open(src, 'w') as fd:
print >>fd, base_fdt
command.Output('dtc', src, '-O', 'dtb', '-o', dtb)
return dtb
def make_its(params):
"""Make a sample .its file with parameters embedded
Args:
params: Dictionary containing parameters to embed in the %() strings
Returns:
Filename of .its file created
"""
its = make_fname('test.its')
with open(its, 'w') as fd:
print >>fd, base_its % params
return its
def make_fit(mkimage, params):
"""Make a sample .fit file ready for loading
This creates a .its script with the selected parameters and uses mkimage to
turn this into a .fit image.
Args:
mkimage: Filename of 'mkimage' utility
params: Dictionary containing parameters to embed in the %() strings
Return:
Filename of .fit file created
"""
fit = make_fname('test.fit')
its = make_its(params)
command.Output(mkimage, '-f', its, fit)
with open(make_fname('u-boot.dts'), 'w') as fd:
print >>fd, base_fdt
return fit
def make_kernel():
"""Make a sample kernel with test data
Returns:
Filename of kernel created
"""
fname = make_fname('test-kernel.bin')
data = ''
for i in range(100):
data += 'this kernel %d is unlikely to boot\n' % i
with open(fname, 'w') as fd:
print >>fd, data
return fname
def make_ramdisk():
"""Make a sample ramdisk with test data
Returns:
Filename of ramdisk created
"""
fname = make_fname('test-ramdisk.bin')
data = ''
for i in range(100):
data += 'ramdisk %d was seldom used in the middle ages\n' % i
with open(fname, 'w') as fd:
print >>fd, data
return fname
def find_matching(text, match):
"""Find a match in a line of text, and return the unmatched line portion
This is used to extract a part of a line from some text. The match string
is used to locate the line - we use the first line that contains that
match text.
Once we find a match, we discard the match string itself from the line,
and return what remains.
TODO: If this function becomes more generally useful, we could change it
to use regex and return groups.
Args:
text: Text to check (each line separated by \n)
match: String to search for
Return:
String containing unmatched portion of line
Exceptions:
ValueError: If match is not found
>>> find_matching('first line:10\\nsecond_line:20', 'first line:')
'10'
>>> find_matching('first line:10\\nsecond_line:20', 'second linex')
Traceback (most recent call last):
...
ValueError: Test aborted
>>> find_matching('first line:10\\nsecond_line:20', 'second_line:')
'20'
"""
for line in text.splitlines():
pos = line.find(match)
if pos != -1:
return line[:pos] + line[pos + len(match):]
print "Expected '%s' but not found in output:"
print text
raise ValueError('Test aborted')
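# A regex-based variant, along the lines of the TODO above. This is a sketch
# only, not used by the tests below; 'pattern' is assumed to contain one group.
def find_matching_re(text, pattern):
    """Return the first group of 'pattern' matched anywhere in 'text'"""
    import re
    m = re.search(pattern, text, re.MULTILINE)
    if not m:
        raise ValueError('Test aborted')
    return m.group(1)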
def set_test(name):
"""Set the name of the current test and print a message
Args:
name: Name of test
"""
global test_name
test_name = name
print name
def fail(msg, stdout):
"""Raise an error with a helpful failure message
Args:
msg: Message to display
"""
print stdout
raise ValueError("Test '%s' failed: %s" % (test_name, msg))
def run_fit_test(mkimage, u_boot):
"""Basic sanity check of FIT loading in U-Boot
TODO: Almost everything:
- hash algorithms - invalid hash/contents should be detected
- signature algorithms - invalid sig/contents should be detected
- compression
- checking that errors are detected like:
- image overwriting
- missing images
- invalid configurations
- incorrect os/arch/type fields
- empty data
- images too large/small
- invalid FDT (e.g. putting a random binary in instead)
- default configuration selection
- bootm command line parameters should have desired effect
- run code coverage to make sure we are testing all the code
"""
global test_name
# Set up invariant files
control_dtb = make_dtb()
kernel = make_kernel()
ramdisk = make_ramdisk()
kernel_out = make_fname('kernel-out.bin')
fdt_out = make_fname('fdt-out.dtb')
ramdisk_out = make_fname('ramdisk-out.bin')
# Set up basic parameters with default values
params = {
'fit_addr' : 0x1000,
'kernel' : kernel,
'kernel_out' : kernel_out,
'kernel_addr' : 0x40000,
'kernel_size' : filesize(kernel),
'fdt_out' : fdt_out,
'fdt_addr' : 0x80000,
'fdt_size' : filesize(control_dtb),
'fdt_load' : '',
'ramdisk' : ramdisk,
'ramdisk_out' : ramdisk_out,
'ramdisk_addr' : 0xc0000,
'ramdisk_size' : filesize(ramdisk),
'ramdisk_load' : '',
'ramdisk_config' : '',
}
# Make a basic FIT and a script to load it
fit = make_fit(mkimage, params)
params['fit'] = fit
cmd = base_script % params
# First check that we can load a kernel
# We could perhaps reduce duplication with some loss of readability
set_test('Kernel load')
stdout = command.Output(u_boot, '-d', control_dtb, '-c', cmd)
debug_stdout(stdout)
if read_file(kernel) != read_file(kernel_out):
fail('Kernel not loaded', stdout)
if read_file(control_dtb) == read_file(fdt_out):
fail('FDT loaded but should be ignored', stdout)
if read_file(ramdisk) == read_file(ramdisk_out):
fail('Ramdisk loaded but should not be', stdout)
# Find out the offset in the FIT where U-Boot has found the FDT
line = find_matching(stdout, 'Booting using the fdt blob at ')
fit_offset = int(line, 16) - params['fit_addr']
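    # 0xd00dfeed is the standard FDT magic number; packed big-endian it is
    # the byte sequence d0 0d fe ed that marks the start of the FDT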
fdt_magic = struct.pack('>L', 0xd00dfeed)
data = read_file(fit)
# Now find where it actually is in the FIT (skip the first word)
real_fit_offset = data.find(fdt_magic, 4)
if fit_offset != real_fit_offset:
fail('U-Boot loaded FDT from offset %#x, FDT is actually at %#x' %
(fit_offset, real_fit_offset), stdout)
# Now a kernel and an FDT
set_test('Kernel + FDT load')
params['fdt_load'] = 'load = <%#x>;' % params['fdt_addr']
fit = make_fit(mkimage, params)
stdout = command.Output(u_boot, '-d', control_dtb, '-c', cmd)
debug_stdout(stdout)
if read_file(kernel) != read_file(kernel_out):
fail('Kernel not loaded', stdout)
if read_file(control_dtb) != read_file(fdt_out):
fail('FDT not loaded', stdout)
if read_file(ramdisk) == read_file(ramdisk_out):
fail('Ramdisk loaded but should not be', stdout)
# Try a ramdisk
set_test('Kernel + FDT + Ramdisk load')
params['ramdisk_config'] = 'ramdisk = "ramdisk@1";'
params['ramdisk_load'] = 'load = <%#x>;' % params['ramdisk_addr']
fit = make_fit(mkimage, params)
stdout = command.Output(u_boot, '-d', control_dtb, '-c', cmd)
debug_stdout(stdout)
if read_file(ramdisk) != read_file(ramdisk_out):
fail('Ramdisk not loaded', stdout)
def run_tests():
"""Parse options, run the FIT tests and print the result"""
global base_path, base_dir
# Work in a temporary directory
base_dir = tempfile.mkdtemp()
parser = OptionParser()
parser.add_option('-u', '--u-boot',
default=os.path.join(base_path, 'u-boot'),
help='Select U-Boot sandbox binary')
parser.add_option('-k', '--keep', action='store_true',
help="Don't delete temporary directory even when tests pass")
parser.add_option('-t', '--selftest', action='store_true',
help='Run internal self tests')
(options, args) = parser.parse_args()
# Find the path to U-Boot, and assume mkimage is in its tools/mkimage dir
base_path = os.path.dirname(options.u_boot)
mkimage = os.path.join(base_path, 'tools/mkimage')
# There are a few doctests - handle these here
if options.selftest:
doctest.testmod()
return
title = 'FIT Tests'
print title, '\n', '=' * len(title)
run_fit_test(mkimage, options.u_boot)
print '\nTests passed'
print 'Caveat: this is only a sanity check - test coverage is poor'
    # Remove the temporary directory unless we are asked to keep it
if options.keep:
print "Output files are in '%s'" % base_dir
else:
shutil.rmtree(base_dir)
run_tests()
| gpl-3.0 |
mahak/nova | nova/conf/hyperv.py | 4 | 10493 | # Copyright (c) 2016 TUBITAK BILGEM
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
hyperv_opt_group = cfg.OptGroup("hyperv",
title='The Hyper-V feature',
help="""
The hyperv feature allows you to configure the Hyper-V hypervisor
driver to be used within an OpenStack deployment.
""")
hyperv_opts = [
cfg.FloatOpt('dynamic_memory_ratio',
default=1.0,
help="""
Dynamic memory ratio
Enables dynamic memory allocation (ballooning) when set to a value
greater than 1. The value expresses the ratio between the total RAM
assigned to an instance and its startup RAM amount. For example a
ratio of 2.0 for an instance with 1024MB of RAM implies 512MB of
RAM allocated at startup.
Possible values:
* 1.0: Disables dynamic memory allocation (Default).
* Float values greater than 1.0: Enables allocation of total implied
RAM divided by this value for startup.
"""),
cfg.BoolOpt('enable_instance_metrics_collection',
default=False,
help="""
Enable instance metrics collection
Enables metrics collection for an instance by using Hyper-V's
metric APIs. Collected data can be retrieved by other apps and
services, e.g.: Ceilometer.
"""),
cfg.StrOpt('instances_path_share',
default="",
help="""
Instances path share
The name of a Windows share mapped to the "instances_path" dir
and used by the resize feature to copy files to the target host.
If left blank, an administrative share (hidden network share) will
be used, looking for the same "instances_path" used locally.
Possible values:
* "": An administrative share will be used (Default).
* Name of a Windows share.
Related options:
* "instances_path": The directory which will be used if this option
here is left blank.
"""),
cfg.BoolOpt('limit_cpu_features',
default=False,
help="""
Limit CPU features
This flag is needed to support live migration to hosts with
different CPU features and checked during instance creation
in order to limit the CPU features used by the instance.
"""),
cfg.IntOpt('mounted_disk_query_retry_count',
default=10,
min=0,
help="""
Mounted disk query retry count
The number of times to retry checking for a mounted disk.
The query runs until the device can be found or the retry
count is reached.
Possible values:
* Positive integer values. Values greater than 1 are recommended
(Default: 10).
Related options:
* Time interval between disk mount retries is declared with
"mounted_disk_query_retry_interval" option.
"""),
cfg.IntOpt('mounted_disk_query_retry_interval',
default=5,
min=0,
help="""
Mounted disk query retry interval
Interval between checks for a mounted disk, in seconds.
Possible values:
* Time in seconds (Default: 5).
Related options:
* This option is meaningful when the mounted_disk_query_retry_count
is greater than 1.
* The retry loop runs with mounted_disk_query_retry_count and
mounted_disk_query_retry_interval configuration options.
"""),
cfg.IntOpt('power_state_check_timeframe',
default=60,
min=0,
help="""
Power state check timeframe
The timeframe to be checked for instance power state changes.
This option is used to fetch the state of the instance from Hyper-V
through the WMI interface, within the specified timeframe.
Possible values:
* Timeframe in seconds (Default: 60).
"""),
cfg.IntOpt('power_state_event_polling_interval',
default=2,
min=0,
help="""
Power state event polling interval
Instance power state change event polling frequency. Sets the
listener interval for power state events to the given value.
This option enhances the internal lifecycle notifications of
instances that reboot themselves. It is unlikely that an operator
has to change this value.
Possible values:
* Time in seconds (Default: 2).
"""),
cfg.StrOpt('qemu_img_cmd',
default="qemu-img.exe",
help=r"""
qemu-img command
qemu-img is required for some of the image related operations
like converting between different image types. You can get it
from here: (http://qemu.weilnetz.de/) or you can install the
Cloudbase OpenStack Hyper-V Compute Driver
(https://cloudbase.it/openstack-hyperv-driver/) which automatically
sets the proper path for this config option. You can either give the
full path of qemu-img.exe or set its path in the PATH environment
variable and leave this option to the default value.
Possible values:
* Name of the qemu-img executable, in case it is in the same
directory as the nova-compute service or its path is in the
PATH environment variable (Default).
* Path of qemu-img command (DRIVELETTER:\PATH\TO\QEMU-IMG\COMMAND).
Related options:
* If the config_drive_cdrom option is False, qemu-img will be used to
convert the ISO to a VHD, otherwise the config drive will
remain an ISO. To use config drive with Hyper-V, you must
set the ``mkisofs_cmd`` value to the full path to an ``mkisofs.exe``
installation.
"""),
cfg.StrOpt('vswitch_name',
help="""
External virtual switch name
The Hyper-V Virtual Switch is a software-based layer-2 Ethernet
network switch that is available with the installation of the
Hyper-V server role. The switch includes programmatically managed
and extensible capabilities to connect virtual machines to both
virtual networks and the physical network. In addition, Hyper-V
Virtual Switch provides policy enforcement for security, isolation,
and service levels. The vSwitch represented by this config option
must be an external one (not internal or private).
Possible values:
* If not provided, the first of a list of available vswitches
is used. This list is queried using WQL.
* Virtual switch name.
"""),
cfg.IntOpt('wait_soft_reboot_seconds',
default=60,
min=0,
help="""
Wait soft reboot seconds
Number of seconds to wait for instance to shut down after soft
reboot request is made. We fall back to hard reboot if instance
does not shutdown within this window.
Possible values:
* Time in seconds (Default: 60).
"""),
cfg.BoolOpt('config_drive_cdrom',
default=False,
help="""
Mount config drive as a CD drive.
OpenStack can be configured to write instance metadata to a config drive, which
is then attached to the instance before it boots. The config drive can be
attached as a disk drive (default) or as a CD drive.
Related options:
* This option is meaningful with ``force_config_drive`` option set to ``True``
or when the REST API call to create an instance will have
``--config-drive=True`` flag.
* ``config_drive_format`` option must be set to ``iso9660`` in order to use
CD drive as the config drive image.
* To use config drive with Hyper-V, you must set the
``mkisofs_cmd`` value to the full path to an ``mkisofs.exe`` installation.
Additionally, you must set the ``qemu_img_cmd`` value to the full path
  to a ``qemu-img`` command installation.
* You can configure the Compute service to always create a configuration
drive by setting the ``force_config_drive`` option to ``True``.
"""),
cfg.BoolOpt('config_drive_inject_password',
default=False,
help="""
Inject password to config drive.
When enabled, the admin password will be available from the config drive image.
Related options:
* This option is meaningful when used with other options that enable
config drive usage with Hyper-V, such as ``force_config_drive``.
"""),
cfg.IntOpt('volume_attach_retry_count',
default=10,
min=0,
help="""
Volume attach retry count
The number of times to retry attaching a volume. Volume attachment
is retried until success or the given retry count is reached.
Possible values:
* Positive integer values (Default: 10).
Related options:
* Time interval between attachment attempts is declared with
volume_attach_retry_interval option.
"""),
cfg.IntOpt('volume_attach_retry_interval',
default=5,
min=0,
help="""
Volume attach retry interval
Interval between volume attachment attempts, in seconds.
Possible values:
* Time in seconds (Default: 5).
Related options:
* This option is meaningful when volume_attach_retry_count
is greater than 1.
* The retry loop runs with volume_attach_retry_count and
volume_attach_retry_interval configuration options.
"""),
cfg.BoolOpt('enable_remotefx',
default=False,
help="""
Enable RemoteFX feature
This requires at least one DirectX 11 capable graphics adapter for
Windows / Hyper-V Server 2012 R2 or newer, and the RDS-Virtualization
feature has to be enabled.
Instances with RemoteFX can be requested with the following flavor
extra specs:
**os:resolution**. Guest VM screen resolution size. Acceptable values::
1024x768, 1280x1024, 1600x1200, 1920x1200, 2560x1600, 3840x2160
``3840x2160`` is only available on Windows / Hyper-V Server 2016.
**os:monitors**. Guest VM number of monitors. Acceptable values::
[1, 4] - Windows / Hyper-V Server 2012 R2
[1, 8] - Windows / Hyper-V Server 2016
**os:vram**. Guest VM VRAM amount. Only available on
Windows / Hyper-V Server 2016. Acceptable values::
64, 128, 256, 512, 1024
"""),
cfg.BoolOpt('use_multipath_io',
default=False,
help="""
Use multipath connections when attaching iSCSI or FC disks.
This requires the Multipath IO Windows feature to be enabled. MPIO must be
configured to claim such devices.
"""),
cfg.ListOpt('iscsi_initiator_list',
default=[],
help="""
List of iSCSI initiators that will be used for establishing iSCSI sessions.
If none are specified, the Microsoft iSCSI initiator service will choose the
initiator.
""")
]
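# A minimal sketch (names assumed, not part of this module) of how these
# options are consumed once registered:
#
#   from oslo_config import cfg
#   CONF = cfg.CONF
#   register_opts(CONF)
#   ratio = CONF.hyperv.dynamic_memory_ratio      # 1.0 unless overridden
#   startup_ram = int(total_ram_mb / ratio)       # e.g. 1024 / 2.0 -> 512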
def register_opts(conf):
conf.register_group(hyperv_opt_group)
conf.register_opts(hyperv_opts, group=hyperv_opt_group)
def list_opts():
return {hyperv_opt_group: hyperv_opts}
| apache-2.0 |
mjfarmer/scada_py | env/lib/python2.7/site-packages/zope/interface/tests/test_declarations.py | 18 | 57969 | ##############################################################################
#
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Test the new API for making and checking interface declarations
"""
import unittest
from zope.interface._compat import _skip_under_py3k, _u
class _Py3ClassAdvice(object):
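    # Helper mixin: runs dynamically generated class-advice code, which is
    # expected to execute cleanly (and without warnings) under Python 2 but
    # to raise TypeError under Python 3, where class advice is unsupported.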
def _run_generated_code(self, code, globs, locs,
fails_under_py3k=True,
):
import warnings
from zope.interface._compat import PYTHON3
with warnings.catch_warnings(record=True) as log:
warnings.resetwarnings()
if not PYTHON3:
exec(code, globs, locs)
self.assertEqual(len(log), 0) # no longer warn
return True
else:
try:
exec(code, globs, locs)
except TypeError:
return False
else:
if fails_under_py3k:
self.fail("Didn't raise TypeError")
class NamedTests(unittest.TestCase):
def test_class(self):
from zope.interface.declarations import named
@named(_u('foo'))
class Foo(object):
pass
self.assertEqual(Foo.__component_name__, _u('foo'))
def test_function(self):
from zope.interface.declarations import named
@named(_u('foo'))
def doFoo(object):
pass
self.assertEqual(doFoo.__component_name__, _u('foo'))
def test_instance(self):
from zope.interface.declarations import named
class Foo(object):
pass
foo = Foo()
named(_u('foo'))(foo)
self.assertEqual(foo.__component_name__, _u('foo'))
class DeclarationTests(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import Declaration
return Declaration
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_ctor_no_bases(self):
decl = self._makeOne()
self.assertEqual(list(decl.__bases__), [])
def test_ctor_w_interface_in_bases(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decl = self._makeOne(IFoo)
self.assertEqual(list(decl.__bases__), [IFoo])
def test_ctor_w_implements_in_bases(self):
from zope.interface.declarations import Implements
impl = Implements()
decl = self._makeOne(impl)
self.assertEqual(list(decl.__bases__), [impl])
def test_changed_wo_existing__v_attrs(self):
decl = self._makeOne()
decl.changed(decl) # doesn't raise
self.assertFalse('_v_attrs' in decl.__dict__)
def test_changed_w_existing__v_attrs(self):
decl = self._makeOne()
decl._v_attrs = object()
decl.changed(decl)
self.assertFalse('_v_attrs' in decl.__dict__)
def test___contains__w_self(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decl = self._makeOne()
self.assertFalse(decl in decl)
def test___contains__w_unrelated_iface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decl = self._makeOne()
self.assertFalse(IFoo in decl)
def test___contains__w_base_interface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decl = self._makeOne(IFoo)
self.assertTrue(IFoo in decl)
def test___iter___empty(self):
decl = self._makeOne()
self.assertEqual(list(decl), [])
def test___iter___single_base(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decl = self._makeOne(IFoo)
self.assertEqual(list(decl), [IFoo])
def test___iter___multiple_bases(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
decl = self._makeOne(IFoo, IBar)
self.assertEqual(list(decl), [IFoo, IBar])
def test___iter___inheritance(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar', (IFoo,))
decl = self._makeOne(IBar)
self.assertEqual(list(decl), [IBar]) #IBar.interfaces() omits bases
def test___iter___w_nested_sequence_overlap(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
decl = self._makeOne(IBar, (IFoo, IBar))
self.assertEqual(list(decl), [IBar, IFoo])
def test_flattened_empty(self):
from zope.interface.interface import Interface
decl = self._makeOne()
self.assertEqual(list(decl.flattened()), [Interface])
def test_flattened_single_base(self):
from zope.interface.interface import Interface
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decl = self._makeOne(IFoo)
self.assertEqual(list(decl.flattened()), [IFoo, Interface])
def test_flattened_multiple_bases(self):
from zope.interface.interface import Interface
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
decl = self._makeOne(IFoo, IBar)
self.assertEqual(list(decl.flattened()), [IFoo, IBar, Interface])
def test_flattened_inheritance(self):
from zope.interface.interface import Interface
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar', (IFoo,))
decl = self._makeOne(IBar)
self.assertEqual(list(decl.flattened()), [IBar, IFoo, Interface])
def test_flattened_w_nested_sequence_overlap(self):
from zope.interface.interface import Interface
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
decl = self._makeOne(IBar, (IFoo, IBar))
# Note that decl.__iro__ has IFoo first.
self.assertEqual(list(decl.flattened()), [IFoo, IBar, Interface])
def test___sub___unrelated_interface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
before = self._makeOne(IFoo)
after = before - IBar
self.assertTrue(isinstance(after, self._getTargetClass()))
self.assertEqual(list(after), [IFoo])
def test___sub___related_interface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
before = self._makeOne(IFoo)
after = before - IFoo
self.assertEqual(list(after), [])
def test___sub___related_interface_by_inheritance(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar', (IFoo,))
before = self._makeOne(IBar)
after = before - IBar
self.assertEqual(list(after), [])
def test___add___unrelated_interface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
before = self._makeOne(IFoo)
after = before + IBar
self.assertTrue(isinstance(after, self._getTargetClass()))
self.assertEqual(list(after), [IFoo, IBar])
def test___add___related_interface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
IBaz = InterfaceClass('IBaz')
before = self._makeOne(IFoo, IBar)
other = self._makeOne(IBar, IBaz)
after = before + other
self.assertEqual(list(after), [IFoo, IBar, IBaz])
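    # In short, the tests above exercise Declaration's set-like arithmetic
    # over interfaces, e.g.:
    #   Declaration(IFoo, IBar) - IBar  ->  [IFoo]
    #   Declaration(IFoo) + IBar        ->  [IFoo, IBar]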
class ImplementsTests(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import Implements
return Implements
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_ctor_no_bases(self):
impl = self._makeOne()
self.assertEqual(impl.inherit, None)
self.assertEqual(impl.declared, ())
self.assertEqual(impl.__name__, '?')
self.assertEqual(list(impl.__bases__), [])
def test___repr__(self):
impl = self._makeOne()
impl.__name__ = 'Testing'
self.assertEqual(repr(impl), '<implementedBy Testing>')
def test___reduce__(self):
from zope.interface.declarations import implementedBy
impl = self._makeOne()
self.assertEqual(impl.__reduce__(), (implementedBy, (None,)))
class Test_implementedByFallback(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import implementedByFallback
return implementedByFallback(*args, **kw)
def test_dictless_wo_existing_Implements_wo_registrations(self):
class Foo(object):
__slots__ = ('__implemented__',)
foo = Foo()
foo.__implemented__ = None
self.assertEqual(list(self._callFUT(foo)), [])
def test_dictless_wo_existing_Implements_cant_assign___implemented__(self):
class Foo(object):
def _get_impl(self): return None
def _set_impl(self, val): raise TypeError
__implemented__ = property(_get_impl, _set_impl)
def __call__(self): pass #act like a factory
foo = Foo()
self.assertRaises(TypeError, self._callFUT, foo)
def test_dictless_wo_existing_Implements_w_registrations(self):
from zope.interface import declarations
class Foo(object):
__slots__ = ('__implemented__',)
foo = Foo()
foo.__implemented__ = None
reg = object()
with _MonkeyDict(declarations,
'BuiltinImplementationSpecifications') as specs:
specs[foo] = reg
self.assertTrue(self._callFUT(foo) is reg)
def test_dictless_w_existing_Implements(self):
from zope.interface.declarations import Implements
impl = Implements()
class Foo(object):
__slots__ = ('__implemented__',)
foo = Foo()
foo.__implemented__ = impl
self.assertTrue(self._callFUT(foo) is impl)
def test_dictless_w_existing_not_Implements(self):
from zope.interface.interface import InterfaceClass
class Foo(object):
__slots__ = ('__implemented__',)
foo = Foo()
IFoo = InterfaceClass('IFoo')
foo.__implemented__ = (IFoo,)
self.assertEqual(list(self._callFUT(foo)), [IFoo])
def test_w_existing_attr_as_Implements(self):
from zope.interface.declarations import Implements
impl = Implements()
class Foo(object):
__implemented__ = impl
self.assertTrue(self._callFUT(Foo) is impl)
def test_builtins_added_to_cache(self):
from zope.interface import declarations
from zope.interface.declarations import Implements
from zope.interface._compat import _BUILTINS
with _MonkeyDict(declarations,
'BuiltinImplementationSpecifications') as specs:
self.assertEqual(list(self._callFUT(tuple)), [])
self.assertEqual(list(self._callFUT(list)), [])
self.assertEqual(list(self._callFUT(dict)), [])
for typ in (tuple, list, dict):
spec = specs[typ]
self.assertTrue(isinstance(spec, Implements))
self.assertEqual(repr(spec),
'<implementedBy %s.%s>'
% (_BUILTINS, typ.__name__))
def test_builtins_w_existing_cache(self):
from zope.interface import declarations
t_spec, l_spec, d_spec = object(), object(), object()
with _MonkeyDict(declarations,
'BuiltinImplementationSpecifications') as specs:
specs[tuple] = t_spec
specs[list] = l_spec
specs[dict] = d_spec
self.assertTrue(self._callFUT(tuple) is t_spec)
self.assertTrue(self._callFUT(list) is l_spec)
self.assertTrue(self._callFUT(dict) is d_spec)
def test_oldstyle_class_no_assertions(self):
# TODO: Figure out P3 story
class Foo:
pass
self.assertEqual(list(self._callFUT(Foo)), [])
def test_no_assertions(self):
# TODO: Figure out P3 story
class Foo(object):
pass
self.assertEqual(list(self._callFUT(Foo)), [])
def test_w_None_no_bases_not_factory(self):
class Foo(object):
__implemented__ = None
foo = Foo()
self.assertRaises(TypeError, self._callFUT, foo)
def test_w_None_no_bases_w_factory(self):
from zope.interface.declarations import objectSpecificationDescriptor
class Foo(object):
__implemented__ = None
def __call__(self):
pass
foo = Foo()
foo.__name__ = 'foo'
spec = self._callFUT(foo)
self.assertEqual(spec.__name__,
'zope.interface.tests.test_declarations.foo')
self.assertTrue(spec.inherit is foo)
self.assertTrue(foo.__implemented__ is spec)
self.assertTrue(foo.__providedBy__ is objectSpecificationDescriptor)
self.assertFalse('__provides__' in foo.__dict__)
def test_w_None_no_bases_w_class(self):
from zope.interface.declarations import ClassProvides
class Foo(object):
__implemented__ = None
spec = self._callFUT(Foo)
self.assertEqual(spec.__name__,
'zope.interface.tests.test_declarations.Foo')
self.assertTrue(spec.inherit is Foo)
self.assertTrue(Foo.__implemented__ is spec)
self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides))
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(Foo.__provides__, Foo.__providedBy__)
def test_w_existing_Implements(self):
from zope.interface.declarations import Implements
impl = Implements()
class Foo(object):
__implemented__ = impl
self.assertTrue(self._callFUT(Foo) is impl)
class Test_implementedBy(Test_implementedByFallback):
# Repeat tests for C optimizations
def _callFUT(self, *args, **kw):
from zope.interface.declarations import implementedBy
return implementedBy(*args, **kw)
class Test_classImplementsOnly(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import classImplementsOnly
return classImplementsOnly(*args, **kw)
def test_no_existing(self):
from zope.interface.declarations import ClassProvides
from zope.interface.interface import InterfaceClass
class Foo(object):
pass
ifoo = InterfaceClass('IFoo')
self._callFUT(Foo, ifoo)
spec = Foo.__implemented__
self.assertEqual(spec.__name__,
'zope.interface.tests.test_declarations.Foo')
self.assertTrue(spec.inherit is None)
self.assertTrue(Foo.__implemented__ is spec)
self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides))
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(Foo.__provides__, Foo.__providedBy__)
def test_w_existing_Implements(self):
from zope.interface.declarations import Implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
impl = Implements(IFoo)
impl.declared = (IFoo,)
class Foo(object):
__implemented__ = impl
impl.inherit = Foo
self._callFUT(Foo, IBar)
# Same spec, now different values
self.assertTrue(Foo.__implemented__ is impl)
self.assertEqual(impl.inherit, None)
self.assertEqual(impl.declared, (IBar,))
class Test_classImplements(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import classImplements
return classImplements(*args, **kw)
def test_no_existing(self):
from zope.interface.declarations import ClassProvides
from zope.interface.interface import InterfaceClass
class Foo(object):
pass
IFoo = InterfaceClass('IFoo')
self._callFUT(Foo, IFoo)
spec = Foo.__implemented__
self.assertEqual(spec.__name__,
'zope.interface.tests.test_declarations.Foo')
self.assertTrue(spec.inherit is Foo)
self.assertTrue(Foo.__implemented__ is spec)
self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides))
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(Foo.__provides__, Foo.__providedBy__)
def test_w_existing_Implements(self):
from zope.interface.declarations import Implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
impl = Implements(IFoo)
impl.declared = (IFoo,)
class Foo(object):
__implemented__ = impl
impl.inherit = Foo
self._callFUT(Foo, IBar)
# Same spec, now different values
self.assertTrue(Foo.__implemented__ is impl)
self.assertEqual(impl.inherit, Foo)
self.assertEqual(impl.declared, (IFoo, IBar,))
def test_w_existing_Implements_w_bases(self):
from zope.interface.declarations import Implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
IBaz = InterfaceClass('IBaz', IFoo)
b_impl = Implements(IBaz)
impl = Implements(IFoo)
impl.declared = (IFoo,)
class Base1(object):
__implemented__ = b_impl
class Base2(object):
__implemented__ = b_impl
class Foo(Base1, Base2):
__implemented__ = impl
impl.inherit = Foo
self._callFUT(Foo, IBar)
# Same spec, now different values
self.assertTrue(Foo.__implemented__ is impl)
self.assertEqual(impl.inherit, Foo)
self.assertEqual(impl.declared, (IFoo, IBar,))
self.assertEqual(impl.__bases__, (IFoo, IBar, b_impl))
class Test__implements_advice(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import _implements_advice
return _implements_advice(*args, **kw)
def test_no_existing_implements(self):
from zope.interface.declarations import classImplements
from zope.interface.declarations import Implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
class Foo(object):
__implements_advice_data__ = ((IFoo,), classImplements)
self._callFUT(Foo)
self.assertFalse('__implements_advice_data__' in Foo.__dict__)
self.assertTrue(isinstance(Foo.__implemented__, Implements))
self.assertEqual(list(Foo.__implemented__), [IFoo])
class Test_implementer(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import implementer
return implementer
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_oldstyle_class(self):
# TODO Py3 story
from zope.interface.declarations import ClassProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
class Foo:
pass
decorator = self._makeOne(IFoo)
returned = decorator(Foo)
self.assertTrue(returned is Foo)
spec = Foo.__implemented__
self.assertEqual(spec.__name__,
'zope.interface.tests.test_declarations.Foo')
self.assertTrue(spec.inherit is Foo)
self.assertTrue(Foo.__implemented__ is spec)
self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides))
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(Foo.__provides__, Foo.__providedBy__)
def test_newstyle_class(self):
from zope.interface.declarations import ClassProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
class Foo(object):
pass
decorator = self._makeOne(IFoo)
returned = decorator(Foo)
self.assertTrue(returned is Foo)
spec = Foo.__implemented__
self.assertEqual(spec.__name__,
'zope.interface.tests.test_declarations.Foo')
self.assertTrue(spec.inherit is Foo)
self.assertTrue(Foo.__implemented__ is spec)
self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides))
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(Foo.__provides__, Foo.__providedBy__)
def test_nonclass_cannot_assign_attr(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decorator = self._makeOne(IFoo)
self.assertRaises(TypeError, decorator, object())
def test_nonclass_can_assign_attr(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
class Foo(object):
pass
foo = Foo()
decorator = self._makeOne(IFoo)
returned = decorator(foo)
self.assertTrue(returned is foo)
spec = foo.__implemented__
self.assertEqual(spec.__name__, '?')
self.assertTrue(spec.inherit is None)
self.assertTrue(foo.__implemented__ is spec)
class Test_implementer_only(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import implementer_only
return implementer_only
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_function(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decorator = self._makeOne(IFoo)
def _function(): pass
self.assertRaises(ValueError, decorator, _function)
def test_method(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decorator = self._makeOne(IFoo)
class Bar:
def _method(): pass
self.assertRaises(ValueError, decorator, Bar._method)
def test_oldstyle_class(self):
# TODO Py3 story
from zope.interface.declarations import Implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
old_spec = Implements(IBar)
class Foo:
__implemented__ = old_spec
decorator = self._makeOne(IFoo)
returned = decorator(Foo)
self.assertTrue(returned is Foo)
spec = Foo.__implemented__
self.assertEqual(spec.__name__, '?')
self.assertTrue(spec.inherit is None)
self.assertTrue(Foo.__implemented__ is spec)
def test_newstyle_class(self):
from zope.interface.declarations import Implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
old_spec = Implements(IBar)
class Foo(object):
__implemented__ = old_spec
decorator = self._makeOne(IFoo)
returned = decorator(Foo)
self.assertTrue(returned is Foo)
spec = Foo.__implemented__
self.assertEqual(spec.__name__, '?')
self.assertTrue(spec.inherit is None)
self.assertTrue(Foo.__implemented__ is spec)
# Test '_implements' by way of 'implements{,Only}', its only callers.
class Test_implementsOnly(unittest.TestCase, _Py3ClassAdvice):
def _getFUT(self):
from zope.interface.declarations import implementsOnly
return implementsOnly
def test_simple(self):
import warnings
from zope.interface.declarations import implementsOnly
from zope.interface._compat import PYTHON3
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'implementsOnly': implementsOnly,
'IFoo': IFoo,
}
locs = {}
CODE = "\n".join([
'class Foo(object):'
' implementsOnly(IFoo)',
])
with warnings.catch_warnings(record=True) as log:
warnings.resetwarnings()
try:
exec(CODE, globs, locs)
except TypeError:
if not PYTHON3:
raise
else:
if PYTHON3:
self.fail("Didn't raise TypeError")
Foo = locs['Foo']
spec = Foo.__implemented__
self.assertEqual(list(spec), [IFoo])
self.assertEqual(len(log), 0) # no longer warn
def test_called_once_from_class_w_bases(self):
from zope.interface.declarations import implements
from zope.interface.declarations import implementsOnly
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
globs = {'implements': implements,
'implementsOnly': implementsOnly,
'IFoo': IFoo,
'IBar': IBar,
}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' implements(IFoo)',
'class Bar(Foo):'
' implementsOnly(IBar)',
])
if self._run_generated_code(CODE, globs, locs):
Bar = locs['Bar']
spec = Bar.__implemented__
self.assertEqual(list(spec), [IBar])
class Test_implements(unittest.TestCase, _Py3ClassAdvice):
def _getFUT(self):
from zope.interface.declarations import implements
return implements
def test_called_from_function(self):
import warnings
from zope.interface.declarations import implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'implements': implements, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'def foo():',
' implements(IFoo)'
])
if self._run_generated_code(CODE, globs, locs, False):
foo = locs['foo']
with warnings.catch_warnings(record=True) as log:
warnings.resetwarnings()
self.assertRaises(TypeError, foo)
self.assertEqual(len(log), 0) # no longer warn
def test_called_twice_from_class(self):
import warnings
from zope.interface.declarations import implements
from zope.interface.interface import InterfaceClass
from zope.interface._compat import PYTHON3
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
globs = {'implements': implements, 'IFoo': IFoo, 'IBar': IBar}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' implements(IFoo)',
' implements(IBar)',
])
with warnings.catch_warnings(record=True) as log:
warnings.resetwarnings()
try:
exec(CODE, globs, locs)
except TypeError:
if not PYTHON3:
self.assertEqual(len(log), 0) # no longer warn
else:
self.fail("Didn't raise TypeError")
def test_called_once_from_class(self):
from zope.interface.declarations import implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'implements': implements, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' implements(IFoo)',
])
if self._run_generated_code(CODE, globs, locs):
Foo = locs['Foo']
spec = Foo.__implemented__
self.assertEqual(list(spec), [IFoo])
class ProvidesClassTests(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import ProvidesClass
return ProvidesClass
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_simple_class_one_interface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
spec = self._makeOne(Foo, IFoo)
self.assertEqual(list(spec), [IFoo])
def test___reduce__(self):
from zope.interface.declarations import Provides # the function
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
spec = self._makeOne(Foo, IFoo)
klass, args = spec.__reduce__()
self.assertTrue(klass is Provides)
self.assertEqual(args, (Foo, IFoo))
def test___get___class(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
spec = self._makeOne(Foo, IFoo)
Foo.__provides__ = spec
self.assertTrue(Foo.__provides__ is spec)
def test___get___instance(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
spec = self._makeOne(Foo, IFoo)
Foo.__provides__ = spec
def _test():
foo = Foo()
return foo.__provides__
self.assertRaises(AttributeError, _test)
class Test_Provides(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import Provides
return Provides(*args, **kw)
def test_no_cached_spec(self):
from zope.interface import declarations
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
cache = {}
class Foo(object):
pass
with _Monkey(declarations, InstanceDeclarations=cache):
spec = self._callFUT(Foo, IFoo)
self.assertEqual(list(spec), [IFoo])
self.assertTrue(cache[(Foo, IFoo)] is spec)
def test_w_cached_spec(self):
from zope.interface import declarations
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
prior = object()
class Foo(object):
pass
cache = {(Foo, IFoo): prior}
with _Monkey(declarations, InstanceDeclarations=cache):
spec = self._callFUT(Foo, IFoo)
self.assertTrue(spec is prior)
class Test_directlyProvides(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import directlyProvides
return directlyProvides(*args, **kw)
def test_w_normal_object(self):
from zope.interface.declarations import ProvidesClass
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
obj = Foo()
self._callFUT(obj, IFoo)
self.assertTrue(isinstance(obj.__provides__, ProvidesClass))
self.assertEqual(list(obj.__provides__), [IFoo])
def test_w_class(self):
from zope.interface.declarations import ClassProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
self._callFUT(Foo, IFoo)
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(list(Foo.__provides__), [IFoo])
@_skip_under_py3k
def test_w_non_descriptor_aware_metaclass(self):
# There are no non-descriptor-aware types in Py3k
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class MetaClass(type):
def __getattribute__(self, name):
# Emulate metaclass whose base is not the type object.
if name == '__class__':
return self
return type.__getattribute__(self, name)
class Foo(object):
__metaclass__ = MetaClass
obj = Foo()
self.assertRaises(TypeError, self._callFUT, obj, IFoo)
def test_w_classless_object(self):
from zope.interface.declarations import ProvidesClass
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
the_dict = {}
class Foo(object):
def __getattribute__(self, name):
# Emulate object w/o any class
if name == '__class__':
return None
try:
return the_dict[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
the_dict[name] = value
obj = Foo()
self._callFUT(obj, IFoo)
self.assertTrue(isinstance(the_dict['__provides__'], ProvidesClass))
self.assertEqual(list(the_dict['__provides__']), [IFoo])
class Test_alsoProvides(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import alsoProvides
return alsoProvides(*args, **kw)
def test_wo_existing_provides(self):
from zope.interface.declarations import ProvidesClass
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
obj = Foo()
self._callFUT(obj, IFoo)
self.assertTrue(isinstance(obj.__provides__, ProvidesClass))
self.assertEqual(list(obj.__provides__), [IFoo])
def test_w_existing_provides(self):
from zope.interface.declarations import directlyProvides
from zope.interface.declarations import ProvidesClass
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
class Foo(object):
pass
obj = Foo()
directlyProvides(obj, IFoo)
self._callFUT(obj, IBar)
self.assertTrue(isinstance(obj.__provides__, ProvidesClass))
self.assertEqual(list(obj.__provides__), [IFoo, IBar])
class Test_noLongerProvides(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import noLongerProvides
return noLongerProvides(*args, **kw)
def test_wo_existing_provides(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
obj = Foo()
self._callFUT(obj, IFoo)
self.assertEqual(list(obj.__provides__), [])
def test_w_existing_provides_hit(self):
from zope.interface.declarations import directlyProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
obj = Foo()
directlyProvides(obj, IFoo)
self._callFUT(obj, IFoo)
self.assertEqual(list(obj.__provides__), [])
def test_w_existing_provides_miss(self):
from zope.interface.declarations import directlyProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
class Foo(object):
pass
obj = Foo()
directlyProvides(obj, IFoo)
self._callFUT(obj, IBar)
self.assertEqual(list(obj.__provides__), [IFoo])
def test_w_iface_implemented_by_class(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
@implementer(IFoo)
class Foo(object):
pass
obj = Foo()
self.assertRaises(ValueError, self._callFUT, obj, IFoo)
class ClassProvidesBaseFallbackTests(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import ClassProvidesBaseFallback
return ClassProvidesBaseFallback
def _makeOne(self, klass, implements):
# Don't instantiate directly: the C version can't have attributes
# assigned.
class Derived(self._getTargetClass()):
def __init__(self, k, i):
self._cls = k
self._implements = i
return Derived(klass, implements)
def test_w_same_class_via_class(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo)
self.assertTrue(Foo.__provides__ is cpbp)
def test_w_same_class_via_instance(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
foo = Foo()
cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo)
self.assertTrue(foo.__provides__ is IFoo)
def test_w_different_class(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
class Bar(Foo):
pass
bar = Bar()
cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo)
self.assertRaises(AttributeError, getattr, Bar, '__provides__')
self.assertRaises(AttributeError, getattr, bar, '__provides__')
class ClassProvidesBaseTests(ClassProvidesBaseFallbackTests):
# Repeat tests for C optimizations
def _getTargetClass(self):
from zope.interface.declarations import ClassProvidesBase
return ClassProvidesBase
class ClassProvidesTests(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import ClassProvides
return ClassProvides
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_w_simple_metaclass(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
@implementer(IFoo)
class Foo(object):
pass
cp = Foo.__provides__ = self._makeOne(Foo, type(Foo), IBar)
self.assertTrue(Foo.__provides__ is cp)
self.assertEqual(list(Foo().__provides__), [IFoo])
def test___reduce__(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
@implementer(IFoo)
class Foo(object):
pass
cp = Foo.__provides__ = self._makeOne(Foo, type(Foo), IBar)
self.assertEqual(cp.__reduce__(),
(self._getTargetClass(), (Foo, type(Foo), IBar)))
class Test_directlyProvidedBy(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import directlyProvidedBy
return directlyProvidedBy(*args, **kw)
def test_wo_declarations_in_class_or_instance(self):
class Foo(object):
pass
foo = Foo()
self.assertEqual(list(self._callFUT(foo)), [])
def test_w_declarations_in_class_but_not_instance(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
@implementer(IFoo)
class Foo(object):
pass
foo = Foo()
self.assertEqual(list(self._callFUT(foo)), [])
def test_w_declarations_in_instance_but_not_class(self):
from zope.interface.declarations import directlyProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
foo = Foo()
directlyProvides(foo, IFoo)
self.assertEqual(list(self._callFUT(foo)), [IFoo])
def test_w_declarations_in_instance_and_class(self):
from zope.interface.declarations import directlyProvides
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
@implementer(IFoo)
class Foo(object):
pass
foo = Foo()
directlyProvides(foo, IBar)
self.assertEqual(list(self._callFUT(foo)), [IBar])
class Test_classProvides(unittest.TestCase, _Py3ClassAdvice):
def _getFUT(self):
from zope.interface.declarations import classProvides
return classProvides
def test_called_from_function(self):
import warnings
from zope.interface.declarations import classProvides
from zope.interface.interface import InterfaceClass
from zope.interface._compat import PYTHON3
IFoo = InterfaceClass("IFoo")
globs = {'classProvides': classProvides, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'def foo():',
' classProvides(IFoo)'
])
exec(CODE, globs, locs)
foo = locs['foo']
with warnings.catch_warnings(record=True) as log:
warnings.resetwarnings()
self.assertRaises(TypeError, foo)
if not PYTHON3:
self.assertEqual(len(log), 0) # no longer warn
def test_called_twice_from_class(self):
import warnings
from zope.interface.declarations import classProvides
from zope.interface.interface import InterfaceClass
from zope.interface._compat import PYTHON3
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
globs = {'classProvides': classProvides, 'IFoo': IFoo, 'IBar': IBar}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' classProvides(IFoo)',
' classProvides(IBar)',
])
with warnings.catch_warnings(record=True) as log:
warnings.resetwarnings()
try:
exec(CODE, globs, locs)
except TypeError:
if not PYTHON3:
self.assertEqual(len(log), 0) # no longer warn
else:
self.fail("Didn't raise TypeError")
def test_called_once_from_class(self):
from zope.interface.declarations import classProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'classProvides': classProvides, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' classProvides(IFoo)',
])
if self._run_generated_code(CODE, globs, locs):
Foo = locs['Foo']
spec = Foo.__providedBy__
self.assertEqual(list(spec), [IFoo])
# Test _classProvides_advice through classProvides, its only caller.
class Test_provider(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import provider
return provider
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_w_class(self):
from zope.interface.declarations import ClassProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
@self._makeOne(IFoo)
class Foo(object):
pass
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(list(Foo.__provides__), [IFoo])
class Test_moduleProvides(unittest.TestCase):
def _getFUT(self):
from zope.interface.declarations import moduleProvides
return moduleProvides
def test_called_from_function(self):
from zope.interface.declarations import moduleProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'__name__': 'zope.interface.tests.foo',
'moduleProvides': moduleProvides, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'def foo():',
' moduleProvides(IFoo)'
])
exec(CODE, globs, locs)
foo = locs['foo']
self.assertRaises(TypeError, foo)
def test_called_from_class(self):
from zope.interface.declarations import moduleProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'__name__': 'zope.interface.tests.foo',
'moduleProvides': moduleProvides, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' moduleProvides(IFoo)',
])
try:
exec(CODE, globs, locs)
except TypeError:
pass
else:
assert False, 'TypeError not raised'
def test_called_once_from_module_scope(self):
from zope.interface.declarations import moduleProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'__name__': 'zope.interface.tests.foo',
'moduleProvides': moduleProvides, 'IFoo': IFoo}
CODE = "\n".join([
'moduleProvides(IFoo)',
])
exec(CODE, globs)
spec = globs['__provides__']
self.assertEqual(list(spec), [IFoo])
def test_called_twice_from_module_scope(self):
from zope.interface.declarations import moduleProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'__name__': 'zope.interface.tests.foo',
'moduleProvides': moduleProvides, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'moduleProvides(IFoo)',
'moduleProvides(IFoo)',
])
try:
exec(CODE, globs)
except TypeError:
pass
else:
assert False, 'TypeError not raised'
class Test_getObjectSpecificationFallback(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import getObjectSpecificationFallback
return getObjectSpecificationFallback(*args, **kw)
def test_wo_existing_provides_classless(self):
the_dict = {}
class Foo(object):
def __getattribute__(self, name):
# Emulate object w/o any class
if name == '__class__':
raise AttributeError(name)
try:
return the_dict[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
the_dict[name] = value
foo = Foo()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [])
def test_existing_provides_is_spec(self):
from zope.interface.declarations import directlyProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
def foo():
pass
directlyProvides(foo, IFoo)
spec = self._callFUT(foo)
self.assertTrue(spec is foo.__provides__)
def test_existing_provides_is_not_spec(self):
def foo():
pass
foo.__provides__ = object() # not a valid spec
spec = self._callFUT(foo)
self.assertEqual(list(spec), [])
def test_existing_provides(self):
from zope.interface.declarations import directlyProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
foo = Foo()
directlyProvides(foo, IFoo)
spec = self._callFUT(foo)
self.assertEqual(list(spec), [IFoo])
def test_wo_provides_on_class_w_implements(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
@implementer(IFoo)
class Foo(object):
pass
foo = Foo()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [IFoo])
def test_wo_provides_on_class_wo_implements(self):
class Foo(object):
pass
foo = Foo()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [])
class Test_getObjectSpecification(Test_getObjectSpecificationFallback):
# Repeat tests for C optimizations
def _callFUT(self, *args, **kw):
from zope.interface.declarations import getObjectSpecification
return getObjectSpecification(*args, **kw)
class Test_providedByFallback(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import providedByFallback
return providedByFallback(*args, **kw)
def test_wo_providedBy_on_class_wo_implements(self):
class Foo(object):
pass
foo = Foo()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [])
def test_w_providedBy_valid_spec(self):
from zope.interface.declarations import Provides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
foo = Foo()
foo.__providedBy__ = Provides(Foo, IFoo)
spec = self._callFUT(foo)
self.assertEqual(list(spec), [IFoo])
def test_w_providedBy_invalid_spec(self):
class Foo(object):
pass
foo = Foo()
foo.__providedBy__ = object()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [])
def test_w_providedBy_invalid_spec_class_w_implements(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
@implementer(IFoo)
class Foo(object):
pass
foo = Foo()
foo.__providedBy__ = object()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [IFoo])
def test_w_providedBy_invalid_spec_w_provides_no_provides_on_class(self):
class Foo(object):
pass
foo = Foo()
foo.__providedBy__ = object()
expected = foo.__provides__ = object()
spec = self._callFUT(foo)
self.assertTrue(spec is expected)
def test_w_providedBy_invalid_spec_w_provides_diff_provides_on_class(self):
class Foo(object):
pass
foo = Foo()
foo.__providedBy__ = object()
expected = foo.__provides__ = object()
Foo.__provides__ = object()
spec = self._callFUT(foo)
self.assertTrue(spec is expected)
def test_w_providedBy_invalid_spec_w_provides_same_provides_on_class(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
@implementer(IFoo)
class Foo(object):
pass
foo = Foo()
foo.__providedBy__ = object()
foo.__provides__ = Foo.__provides__ = object()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [IFoo])
class Test_providedBy(Test_providedByFallback):
# Repeat tests for C optimizations
def _callFUT(self, *args, **kw):
from zope.interface.declarations import providedBy
return providedBy(*args, **kw)
class ObjectSpecificationDescriptorFallbackTests(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations \
import ObjectSpecificationDescriptorFallback
return ObjectSpecificationDescriptorFallback
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_accessed_via_class(self):
from zope.interface.declarations import Provides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
Foo.__provides__ = Provides(Foo, IFoo)
Foo.__providedBy__ = self._makeOne()
self.assertEqual(list(Foo.__providedBy__), [IFoo])
def test_accessed_via_inst_wo_provides(self):
from zope.interface.declarations import implementer
from zope.interface.declarations import Provides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
@implementer(IFoo)
class Foo(object):
pass
Foo.__provides__ = Provides(Foo, IBar)
Foo.__providedBy__ = self._makeOne()
foo = Foo()
self.assertEqual(list(foo.__providedBy__), [IFoo])
def test_accessed_via_inst_w_provides(self):
from zope.interface.declarations import directlyProvides
from zope.interface.declarations import implementer
from zope.interface.declarations import Provides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
IBaz = InterfaceClass("IBaz")
@implementer(IFoo)
class Foo(object):
pass
Foo.__provides__ = Provides(Foo, IBar)
Foo.__providedBy__ = self._makeOne()
foo = Foo()
directlyProvides(foo, IBaz)
self.assertEqual(list(foo.__providedBy__), [IBaz, IFoo])
class ObjectSpecificationDescriptorTests(
ObjectSpecificationDescriptorFallbackTests):
# Repeat tests for C optimizations
def _getTargetClass(self):
from zope.interface.declarations import ObjectSpecificationDescriptor
return ObjectSpecificationDescriptor
# Test _normalizeargs through its callers.
class _Monkey(object):
    # context manager for replacing attributes of a module within the
    # scope of a test.
def __init__(self, module, **kw):
self.module = module
self.to_restore = dict([(key, getattr(module, key)) for key in kw])
for key, value in kw.items():
setattr(module, key, value)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
for key, value in self.to_restore.items():
setattr(self.module, key, value)
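# A minimal usage sketch for _Monkey (the module and attribute names are
# hypothetical):
#
#   with _Monkey(some_module, some_attr=stub):
#       ...  # some_module.some_attr is `stub` here, and restored on exit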
class _MonkeyDict(object):
    # context manager for replacing (and later restoring) a dict within a
    # module for the scope of a test.
def __init__(self, module, attrname, **kw):
self.module = module
self.target = getattr(module, attrname)
self.to_restore = self.target.copy()
self.target.clear()
self.target.update(kw)
def __enter__(self):
return self.target
def __exit__(self, exc_type, exc_val, exc_tb):
self.target.clear()
self.target.update(self.to_restore)
def test_suite():
return unittest.TestSuite((
unittest.makeSuite(DeclarationTests),
unittest.makeSuite(ImplementsTests),
unittest.makeSuite(Test_implementedByFallback),
unittest.makeSuite(Test_implementedBy),
unittest.makeSuite(Test_classImplementsOnly),
unittest.makeSuite(Test_classImplements),
unittest.makeSuite(Test__implements_advice),
unittest.makeSuite(Test_implementer),
unittest.makeSuite(Test_implementer_only),
unittest.makeSuite(Test_implements),
unittest.makeSuite(Test_implementsOnly),
unittest.makeSuite(ProvidesClassTests),
unittest.makeSuite(Test_Provides),
unittest.makeSuite(Test_directlyProvides),
unittest.makeSuite(Test_alsoProvides),
unittest.makeSuite(Test_noLongerProvides),
unittest.makeSuite(ClassProvidesBaseFallbackTests),
unittest.makeSuite(ClassProvidesTests),
unittest.makeSuite(Test_directlyProvidedBy),
unittest.makeSuite(Test_classProvides),
unittest.makeSuite(Test_provider),
unittest.makeSuite(Test_moduleProvides),
unittest.makeSuite(Test_getObjectSpecificationFallback),
unittest.makeSuite(Test_getObjectSpecification),
unittest.makeSuite(Test_providedByFallback),
unittest.makeSuite(Test_providedBy),
unittest.makeSuite(ObjectSpecificationDescriptorFallbackTests),
unittest.makeSuite(ObjectSpecificationDescriptorTests),
))
| gpl-3.0 |
yeming233/rally | tests/unit/plugins/openstack/scenarios/ironic/test_utils.py | 1 | 2990 | # Copyright 2015: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from rally.plugins.openstack.scenarios.ironic import utils
from tests.unit import test
IRONIC_UTILS = "rally.plugins.openstack.scenarios.ironic.utils"
class IronicScenarioTestCase(test.ScenarioTestCase):
@mock.patch("%s.utils.wait_for_status" % IRONIC_UTILS)
def test__create_node(self, mock_wait_for_status):
self.admin_clients("ironic").node.create.return_value = "fake_node"
scenario = utils.IronicScenario(self.context)
scenario.generate_random_name = mock.Mock()
scenario._create_node(driver="fake", properties="fake_prop",
fake_param="foo")
self.admin_clients("ironic").node.create.assert_called_once_with(
driver="fake", properties="fake_prop", fake_param="foo",
name=scenario.generate_random_name.return_value)
self.assertTrue(mock_wait_for_status.called)
self._test_atomic_action_timer(scenario.atomic_actions(),
"ironic.create_node")
@mock.patch("%s.utils.wait_for_status" % IRONIC_UTILS)
def test__delete_node(self, mock_wait_for_status):
mock_node_delete = mock.Mock()
self.admin_clients("ironic").node.delete = mock_node_delete
scenario = utils.IronicScenario(self.context)
scenario._delete_node(mock.Mock(uuid="fake_id"))
self.assertTrue(mock_wait_for_status.called)
self.admin_clients("ironic").node.delete.assert_called_once_with(
"fake_id")
self._test_atomic_action_timer(scenario.atomic_actions(),
"ironic.delete_node")
def test__list_nodes(self):
self.admin_clients("ironic").node.list.return_value = ["fake"]
scenario = utils.IronicScenario(self.context)
fake_params = {
"sort_dir": "foo1",
"associated": "foo2",
"detail": True,
"maintenance": "foo5"
}
return_nodes_list = scenario._list_nodes(**fake_params)
self.assertEqual(["fake"], return_nodes_list)
self.admin_clients("ironic").node.list.assert_called_once_with(
sort_dir="foo1", associated="foo2", detail=True,
maintenance="foo5")
self._test_atomic_action_timer(scenario.atomic_actions(),
"ironic.list_nodes")
| apache-2.0 |
netgroup/dreamer-ryu | ryu/ofproto/ofproto_v1_0_parser.py | 9 | 81719 | # Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011, 2012 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Decoder/Encoder implementations of OpenFlow 1.0.
"""
import struct
import binascii
from ofproto_parser import StringifyMixin, MsgBase, msg_pack_into, msg_str_attr
from ryu.lib import addrconv
from ryu.lib import mac
from . import ofproto_parser
from . import ofproto_v1_0 as ofproto
from . import nx_match
from ryu import utils
import logging
LOG = logging.getLogger('ryu.ofproto.ofproto_v1_0_parser')
_MSG_PARSERS = {}
def _set_msg_type(msg_type):
'''Annotate corresponding OFP message type'''
def _set_cls_msg_type(cls):
cls.cls_msg_type = msg_type
return cls
return _set_cls_msg_type
def _register_parser(cls):
'''class decorator to register msg parser'''
assert cls.cls_msg_type is not None
assert cls.cls_msg_type not in _MSG_PARSERS
_MSG_PARSERS[cls.cls_msg_type] = cls.parser
return cls
@ofproto_parser.register_msg_parser(ofproto.OFP_VERSION)
def msg_parser(datapath, version, msg_type, msg_len, xid, buf):
parser = _MSG_PARSERS.get(msg_type)
return parser(datapath, version, msg_type, msg_len, xid, buf)
# OFP_MSG_REPLY = {
# OFPFeaturesRequest: OFPSwitchFeatures,
# OFPBarrierRequest: OFPBarrierReply,
# OFPQueueGetConfigRequest: OFPQueueGetConfigReply,
#
# # ofp_stats_request -> ofp_stats_reply
# OFPDescStatsRequest: OFPDescStatsReply,
# OFPFlowStatsRequest: OFPFlowStatsReply,
# OFPAggregateStatsRequest: OFPAggregateStatsReply,
# OFPTableStatsRequest: OFPTableStatsReply,
# OFPPortStatsRequest: OFPPortStatsReply,
# OFPQueueStatsRequest: OFPQueueStatsReply,
# OFPVendorStatsRequest: OFPVendorStatsReply,
# }
def _set_msg_reply(msg_reply):
'''Annotate OFP reply message class'''
def _set_cls_msg_reply(cls):
cls.cls_msg_reply = msg_reply
return cls
return _set_cls_msg_reply
#
# common structures
#
class OFPPhyPort(ofproto_parser.namedtuple('OFPPhyPort', (
'port_no', 'hw_addr', 'name', 'config', 'state', 'curr', 'advertised',
'supported', 'peer'))):
_TYPE = {
'ascii': [
'hw_addr',
],
'utf-8': [
# OF spec is unclear about the encoding of name.
            # we assume UTF-8, which is used by OVS.
'name',
]
}
@classmethod
def parser(cls, buf, offset):
port = struct.unpack_from(ofproto.OFP_PHY_PORT_PACK_STR,
buf, offset)
port = list(port)
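        # convert the raw 6-byte MAC into its 'xx:xx:xx:xx:xx:xx' text form
        # and strip NUL padding from the fixed-width name field below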
i = cls._fields.index('hw_addr')
port[i] = addrconv.mac.bin_to_text(port[i])
i = cls._fields.index('name')
port[i] = port[i].rstrip('\0')
return cls(*port)
class OFPMatch(StringifyMixin):
def __init__(self, wildcards=None, in_port=None, dl_src=None, dl_dst=None,
dl_vlan=None, dl_vlan_pcp=None, dl_type=None, nw_tos=None,
nw_proto=None, nw_src=None, nw_dst=None,
tp_src=None, tp_dst=None, nw_src_mask=32, nw_dst_mask=32):
super(OFPMatch, self).__init__()
wc = ofproto.OFPFW_ALL
if in_port is None:
self.in_port = 0
else:
wc &= ~ofproto.OFPFW_IN_PORT
self.in_port = in_port
if dl_src is None:
self.dl_src = mac.DONTCARE
else:
wc &= ~ofproto.OFPFW_DL_SRC
if dl_src == 0:
self.dl_src = mac.DONTCARE
else:
self.dl_src = dl_src
if dl_dst is None:
self.dl_dst = mac.DONTCARE
else:
wc &= ~ofproto.OFPFW_DL_DST
if dl_dst == 0:
self.dl_dst = mac.DONTCARE
else:
self.dl_dst = dl_dst
if dl_vlan is None:
self.dl_vlan = 0
else:
wc &= ~ofproto.OFPFW_DL_VLAN
self.dl_vlan = dl_vlan
if dl_vlan_pcp is None:
self.dl_vlan_pcp = 0
else:
wc &= ~ofproto.OFPFW_DL_VLAN_PCP
self.dl_vlan_pcp = dl_vlan_pcp
if dl_type is None:
self.dl_type = 0
else:
wc &= ~ofproto.OFPFW_DL_TYPE
self.dl_type = dl_type
if nw_tos is None:
self.nw_tos = 0
else:
wc &= ~ofproto.OFPFW_NW_TOS
self.nw_tos = nw_tos
if nw_proto is None:
self.nw_proto = 0
else:
wc &= ~ofproto.OFPFW_NW_PROTO
self.nw_proto = nw_proto
if nw_src is None:
self.nw_src = 0
else:
wc &= (32 - nw_src_mask) << ofproto.OFPFW_NW_SRC_SHIFT \
| ~ofproto.OFPFW_NW_SRC_MASK
self.nw_src = nw_src
if nw_dst is None:
self.nw_dst = 0
else:
wc &= (32 - nw_dst_mask) << ofproto.OFPFW_NW_DST_SHIFT \
| ~ofproto.OFPFW_NW_DST_MASK
self.nw_dst = nw_dst
if tp_src is None:
self.tp_src = 0
else:
wc &= ~ofproto.OFPFW_TP_SRC
self.tp_src = tp_src
if tp_dst is None:
self.tp_dst = 0
else:
wc &= ~ofproto.OFPFW_TP_DST
self.tp_dst = tp_dst
if wildcards is None:
self.wildcards = wc
else:
self.wildcards = wildcards
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_MATCH_PACK_STR, buf, offset,
self.wildcards, self.in_port, self.dl_src,
self.dl_dst, self.dl_vlan, self.dl_vlan_pcp,
self.dl_type, self.nw_tos, self.nw_proto,
self.nw_src, self.nw_dst, self.tp_src, self.tp_dst)
@classmethod
def parse(cls, buf, offset):
match = struct.unpack_from(ofproto.OFP_MATCH_PACK_STR,
buf, offset)
return cls(*match)
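# A minimal OFPMatch usage sketch (field values are illustrative):
#
#   # match TCP (nw_proto=6) over IPv4 (dl_type=0x0800) arriving on port 1;
#   # every field left as None stays wildcarded via self.wildcards
#   match = OFPMatch(in_port=1, dl_type=0x0800, nw_proto=6)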
class OFPActionHeader(StringifyMixin):
_base_attributes = ['type', 'len']
def __init__(self, type_, len_):
self.type = type_
self.len = len_
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_HEADER_PACK_STR,
buf, offset, self.type, self.len)
class OFPAction(OFPActionHeader):
_ACTION_TYPES = {}
@staticmethod
def register_action_type(type_, len_):
def _register_action_type(cls):
cls.cls_action_type = type_
cls.cls_action_len = len_
OFPAction._ACTION_TYPES[cls.cls_action_type] = cls
return cls
return _register_action_type
def __init__(self):
cls = self.__class__
super(OFPAction, self).__init__(cls.cls_action_type,
cls.cls_action_len)
@classmethod
def parser(cls, buf, offset):
type_, len_ = struct.unpack_from(
ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
cls_ = cls._ACTION_TYPES.get(type_)
assert cls_ is not None
return cls_.parser(buf, offset)
@OFPAction.register_action_type(ofproto.OFPAT_OUTPUT,
ofproto.OFP_ACTION_OUTPUT_SIZE)
class OFPActionOutput(OFPAction):
    # NOTE: This magic number (0xffe5) is used because OpenFlow 1.0 has no
    # suitable constant for it; the same value as OFPCML_MAX of OF 1.2 and
    # OF 1.3 is used.
def __init__(self, port, max_len=0xffe5):
super(OFPActionOutput, self).__init__()
self.port = port
self.max_len = max_len
@classmethod
def parser(cls, buf, offset):
type_, len_, port, max_len = struct.unpack_from(
ofproto.OFP_ACTION_OUTPUT_PACK_STR, buf, offset)
assert type_ == ofproto.OFPAT_OUTPUT
assert len_ == ofproto.OFP_ACTION_OUTPUT_SIZE
return cls(port, max_len)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_OUTPUT_PACK_STR, buf,
offset, self.type, self.len, self.port, self.max_len)
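# Example (a sketch): an action list that forwards matched packets out of
# physical port 2. max_len only takes effect when the output port is
# OFPP_CONTROLLER, where it caps the number of bytes sent to the controller.
#
#   actions = [OFPActionOutput(2)]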
@OFPAction.register_action_type(ofproto.OFPAT_SET_VLAN_VID,
ofproto.OFP_ACTION_VLAN_VID_SIZE)
class OFPActionVlanVid(OFPAction):
def __init__(self, vlan_vid):
super(OFPActionVlanVid, self).__init__()
self.vlan_vid = vlan_vid
@classmethod
def parser(cls, buf, offset):
type_, len_, vlan_vid = struct.unpack_from(
ofproto.OFP_ACTION_VLAN_VID_PACK_STR, buf, offset)
assert type_ == ofproto.OFPAT_SET_VLAN_VID
assert len_ == ofproto.OFP_ACTION_VLAN_VID_SIZE
return cls(vlan_vid)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_VLAN_VID_PACK_STR,
buf, offset, self.type, self.len, self.vlan_vid)
@OFPAction.register_action_type(ofproto.OFPAT_SET_VLAN_PCP,
ofproto.OFP_ACTION_VLAN_PCP_SIZE)
class OFPActionVlanPcp(OFPAction):
def __init__(self, vlan_pcp):
super(OFPActionVlanPcp, self).__init__()
self.vlan_pcp = vlan_pcp
@classmethod
def parser(cls, buf, offset):
type_, len_, vlan_pcp = struct.unpack_from(
ofproto.OFP_ACTION_VLAN_PCP_PACK_STR, buf, offset)
assert type_ == ofproto.OFPAT_SET_VLAN_PCP
assert len_ == ofproto.OFP_ACTION_VLAN_PCP_SIZE
return cls(vlan_pcp)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_VLAN_PCP_PACK_STR,
buf, offset, self.type, self.len, self.vlan_pcp)
@OFPAction.register_action_type(ofproto.OFPAT_STRIP_VLAN,
ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionStripVlan(OFPAction):
def __init__(self):
super(OFPActionStripVlan, self).__init__()
@classmethod
def parser(cls, buf, offset):
type_, len_ = struct.unpack_from(
ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
assert type_ == ofproto.OFPAT_STRIP_VLAN
assert len_ == ofproto.OFP_ACTION_HEADER_SIZE
return cls()
class OFPActionDlAddr(OFPAction):
def __init__(self, dl_addr):
super(OFPActionDlAddr, self).__init__()
self.dl_addr = dl_addr
@classmethod
def parser(cls, buf, offset):
type_, len_, dl_addr = struct.unpack_from(
ofproto.OFP_ACTION_DL_ADDR_PACK_STR, buf, offset)
assert type_ in (ofproto.OFPAT_SET_DL_SRC,
ofproto.OFPAT_SET_DL_DST)
assert len_ == ofproto.OFP_ACTION_DL_ADDR_SIZE
return cls(dl_addr)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_DL_ADDR_PACK_STR,
buf, offset, self.type, self.len, self.dl_addr)
@OFPAction.register_action_type(ofproto.OFPAT_SET_DL_SRC,
ofproto.OFP_ACTION_DL_ADDR_SIZE)
class OFPActionSetDlSrc(OFPActionDlAddr):
def __init__(self, dl_addr):
super(OFPActionSetDlSrc, self).__init__(dl_addr)
@OFPAction.register_action_type(ofproto.OFPAT_SET_DL_DST,
ofproto.OFP_ACTION_DL_ADDR_SIZE)
class OFPActionSetDlDst(OFPActionDlAddr):
def __init__(self, dl_addr):
super(OFPActionSetDlDst, self).__init__(dl_addr)
class OFPActionNwAddr(OFPAction):
def __init__(self, nw_addr):
super(OFPActionNwAddr, self).__init__()
self.nw_addr = nw_addr
@classmethod
def parser(cls, buf, offset):
type_, len_, nw_addr = struct.unpack_from(
ofproto.OFP_ACTION_NW_ADDR_PACK_STR, buf, offset)
assert type_ in (ofproto.OFPAT_SET_NW_SRC,
ofproto.OFPAT_SET_NW_DST)
assert len_ == ofproto.OFP_ACTION_NW_ADDR_SIZE
return cls(nw_addr)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_NW_ADDR_PACK_STR,
buf, offset, self.type, self.len, self.nw_addr)
@OFPAction.register_action_type(ofproto.OFPAT_SET_NW_SRC,
ofproto.OFP_ACTION_NW_ADDR_SIZE)
class OFPActionSetNwSrc(OFPActionNwAddr):
def __init__(self, nw_addr):
super(OFPActionSetNwSrc, self).__init__(nw_addr)
@OFPAction.register_action_type(ofproto.OFPAT_SET_NW_DST,
ofproto.OFP_ACTION_NW_ADDR_SIZE)
class OFPActionSetNwDst(OFPActionNwAddr):
def __init__(self, nw_addr):
super(OFPActionSetNwDst, self).__init__(nw_addr)
@OFPAction.register_action_type(ofproto.OFPAT_SET_NW_TOS,
ofproto.OFP_ACTION_NW_TOS_SIZE)
class OFPActionSetNwTos(OFPAction):
def __init__(self, tos):
super(OFPActionSetNwTos, self).__init__()
self.tos = tos
@classmethod
def parser(cls, buf, offset):
type_, len_, tos = struct.unpack_from(
ofproto.OFP_ACTION_NW_TOS_PACK_STR, buf, offset)
assert type_ == ofproto.OFPAT_SET_NW_TOS
assert len_ == ofproto.OFP_ACTION_NW_TOS_SIZE
return cls(tos)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_NW_TOS_PACK_STR,
buf, offset, self.type, self.len, self.tos)
class OFPActionTpPort(OFPAction):
def __init__(self, tp):
super(OFPActionTpPort, self).__init__()
self.tp = tp
@classmethod
def parser(cls, buf, offset):
type_, len_, tp = struct.unpack_from(
ofproto.OFP_ACTION_TP_PORT_PACK_STR, buf, offset)
assert type_ in (ofproto.OFPAT_SET_TP_SRC,
ofproto.OFPAT_SET_TP_DST)
assert len_ == ofproto.OFP_ACTION_TP_PORT_SIZE
return cls(tp)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_TP_PORT_PACK_STR,
buf, offset, self.type, self.len, self.tp)
@OFPAction.register_action_type(ofproto.OFPAT_SET_TP_SRC,
ofproto.OFP_ACTION_TP_PORT_SIZE)
class OFPActionSetTpSrc(OFPActionTpPort):
def __init__(self, tp):
super(OFPActionSetTpSrc, self).__init__(tp)
@OFPAction.register_action_type(ofproto.OFPAT_SET_TP_DST,
ofproto.OFP_ACTION_TP_PORT_SIZE)
class OFPActionSetTpDst(OFPActionTpPort):
def __init__(self, tp):
super(OFPActionSetTpDst, self).__init__(tp)
@OFPAction.register_action_type(ofproto.OFPAT_ENQUEUE,
ofproto.OFP_ACTION_ENQUEUE_SIZE)
class OFPActionEnqueue(OFPAction):
def __init__(self, port, queue_id):
super(OFPActionEnqueue, self).__init__()
self.port = port
self.queue_id = queue_id
@classmethod
def parser(cls, buf, offset):
type_, len_, port, queue_id = struct.unpack_from(
ofproto.OFP_ACTION_ENQUEUE_PACK_STR, buf, offset)
assert type_ == ofproto.OFPAT_ENQUEUE
assert len_ == ofproto.OFP_ACTION_ENQUEUE_SIZE
return cls(port, queue_id)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_ENQUEUE_PACK_STR, buf, offset,
self.type, self.len, self.port, self.queue_id)
@OFPAction.register_action_type(ofproto.OFPAT_VENDOR, 0)
class OFPActionVendor(OFPAction):
_ACTION_VENDORS = {}
@staticmethod
def register_action_vendor(vendor):
def _register_action_vendor(cls):
cls.cls_vendor = vendor
OFPActionVendor._ACTION_VENDORS[cls.cls_vendor] = cls
return cls
return _register_action_vendor
def __init__(self):
super(OFPActionVendor, self).__init__()
self.vendor = self.cls_vendor
@classmethod
def parser(cls, buf, offset):
type_, len_, vendor = struct.unpack_from(
ofproto.OFP_ACTION_VENDOR_HEADER_PACK_STR, buf, offset)
cls_ = cls._ACTION_VENDORS.get(vendor)
return cls_.parser(buf, offset)
@OFPActionVendor.register_action_vendor(ofproto.NX_VENDOR_ID)
class NXActionHeader(OFPActionVendor):
_NX_ACTION_SUBTYPES = {}
@staticmethod
def register_nx_action_subtype(subtype, len_):
def _register_nx_action_subtype(cls):
cls.cls_action_len = len_
cls.cls_subtype = subtype
NXActionHeader._NX_ACTION_SUBTYPES[cls.cls_subtype] = cls
return cls
return _register_nx_action_subtype
def __init__(self):
super(NXActionHeader, self).__init__()
self.subtype = self.cls_subtype
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_HEADER_PACK_STR,
buf, offset, self.type, self.len)
@classmethod
def parser(cls, buf, offset):
type_, len_, vendor, subtype = struct.unpack_from(
ofproto.NX_ACTION_HEADER_PACK_STR, buf, offset)
cls_ = cls._NX_ACTION_SUBTYPES.get(subtype)
return cls_.parser(buf, offset)
class NXActionResubmitBase(NXActionHeader):
def __init__(self, in_port, table):
super(NXActionResubmitBase, self).__init__()
assert self.subtype in (ofproto.NXAST_RESUBMIT,
ofproto.NXAST_RESUBMIT_TABLE)
self.in_port = in_port
self.table = table
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_RESUBMIT_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype,
self.in_port, self.table)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_RESUBMIT, ofproto.NX_ACTION_RESUBMIT_SIZE)
class NXActionResubmit(NXActionResubmitBase):
def __init__(self, in_port=ofproto.OFPP_IN_PORT):
super(NXActionResubmit, self).__init__(in_port, 0)
@classmethod
def parser(cls, buf, offset):
type_, len_, vendor, subtype, in_port, table = struct.unpack_from(
ofproto.NX_ACTION_RESUBMIT_PACK_STR, buf, offset)
return cls(in_port)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_RESUBMIT_TABLE, ofproto.NX_ACTION_RESUBMIT_SIZE)
class NXActionResubmitTable(NXActionResubmitBase):
def __init__(self, in_port=ofproto.OFPP_IN_PORT, table=0xff):
super(NXActionResubmitTable, self).__init__(in_port, table)
@classmethod
def parser(cls, buf, offset):
type_, len_, vendor, subtype, in_port, table = struct.unpack_from(
ofproto.NX_ACTION_RESUBMIT_PACK_STR, buf, offset)
return cls(in_port, table)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_SET_TUNNEL, ofproto.NX_ACTION_SET_TUNNEL_SIZE)
class NXActionSetTunnel(NXActionHeader):
def __init__(self, tun_id):
super(NXActionSetTunnel, self).__init__()
self.tun_id = tun_id
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_SET_TUNNEL_PACK_STR, buf,
offset, self.type, self.len, self.vendor, self.subtype,
self.tun_id)
@classmethod
def parser(cls, buf, offset):
type_, len_, vendor, subtype, tun_id = struct.unpack_from(
ofproto.NX_ACTION_SET_TUNNEL_PACK_STR, buf, offset)
return cls(tun_id)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_SET_QUEUE, ofproto.NX_ACTION_SET_QUEUE_SIZE)
class NXActionSetQueue(NXActionHeader):
def __init__(self, queue_id):
super(NXActionSetQueue, self).__init__()
self.queue_id = queue_id
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_SET_QUEUE_PACK_STR, buf,
offset, self.type, self.len, self.vendor,
self.subtype, self.queue_id)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, queue_id) = struct.unpack_from(
ofproto.NX_ACTION_SET_QUEUE_PACK_STR, buf, offset)
return cls(queue_id)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_POP_QUEUE, ofproto.NX_ACTION_POP_QUEUE_SIZE)
class NXActionPopQueue(NXActionHeader):
def __init__(self):
super(NXActionPopQueue, self).__init__()
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_POP_QUEUE_PACK_STR, buf,
offset, self.type, self.len, self.vendor,
self.subtype)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype) = struct.unpack_from(
ofproto.NX_ACTION_POP_QUEUE_PACK_STR, buf, offset)
return cls()
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_REG_MOVE, ofproto.NX_ACTION_REG_MOVE_SIZE)
class NXActionRegMove(NXActionHeader):
def __init__(self, n_bits, src_ofs, dst_ofs, src, dst):
super(NXActionRegMove, self).__init__()
self.n_bits = n_bits
self.src_ofs = src_ofs
self.dst_ofs = dst_ofs
self.src = src
self.dst = dst
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_REG_MOVE_PACK_STR, buf,
offset, self.type, self.len, self.vendor,
self.subtype, self.n_bits, self.src_ofs, self.dst_ofs,
self.src, self.dst)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, n_bits, src_ofs, dst_ofs,
src, dst) = struct.unpack_from(
ofproto.NX_ACTION_REG_MOVE_PACK_STR, buf, offset)
return cls(n_bits, src_ofs, dst_ofs, src, dst)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_REG_LOAD, ofproto.NX_ACTION_REG_LOAD_SIZE)
class NXActionRegLoad(NXActionHeader):
def __init__(self, ofs_nbits, dst, value):
super(NXActionRegLoad, self).__init__()
self.ofs_nbits = ofs_nbits
self.dst = dst
self.value = value
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_REG_LOAD_PACK_STR, buf,
offset, self.type, self.len, self.vendor,
self.subtype, self.ofs_nbits, self.dst, self.value)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, ofs_nbits, dst,
value) = struct.unpack_from(
ofproto.NX_ACTION_REG_LOAD_PACK_STR, buf, offset)
return cls(ofs_nbits, dst, value)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_SET_TUNNEL64, ofproto.NX_ACTION_SET_TUNNEL64_SIZE)
class NXActionSetTunnel64(NXActionHeader):
def __init__(self, tun_id):
super(NXActionSetTunnel64, self).__init__()
self.tun_id = tun_id
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_SET_TUNNEL64_PACK_STR, buf,
offset, self.type, self.len, self.vendor, self.subtype,
self.tun_id)
@classmethod
def parser(cls, buf, offset):
type_, len_, vendor, subtype, tun_id = struct.unpack_from(
ofproto.NX_ACTION_SET_TUNNEL64_PACK_STR, buf, offset)
return cls(tun_id)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_MULTIPATH, ofproto.NX_ACTION_MULTIPATH_SIZE)
class NXActionMultipath(NXActionHeader):
def __init__(self, fields, basis, algorithm, max_link, arg,
ofs_nbits, dst):
super(NXActionMultipath, self).__init__()
self.fields = fields
self.basis = basis
self.algorithm = algorithm
self.max_link = max_link
self.arg = arg
self.ofs_nbits = ofs_nbits
self.dst = dst
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_MULTIPATH_PACK_STR, buf,
offset, self.type, self.len, self.vendor, self.subtype,
self.fields, self.basis, self.algorithm, self.max_link,
self.arg, self.ofs_nbits, self.dst)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, fields, basis, algorithm,
max_link, arg, ofs_nbits, dst) = struct.unpack_from(
ofproto.NX_ACTION_MULTIPATH_PACK_STR, buf, offset)
return cls(fields, basis, algorithm, max_link, arg, ofs_nbits,
dst)
@NXActionHeader.register_nx_action_subtype(ofproto.NXAST_NOTE, 0)
class NXActionNote(NXActionHeader):
def __init__(self, note):
super(NXActionNote, self).__init__()
# should check here if the note is valid (only hex values)
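        # pad so the whole action is a multiple of 8 bytes: the fixed part
        # (type/len/vendor/subtype) is 10 bytes, so e.g. a 2-byte note gives
        # (2 + 10) % 8 == 4 and 8 - 4 == 4 zero bytes are appended, for a
        # total action length of 16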
pad = (len(note) + 10) % 8
if pad:
note += [0x0 for i in range(8 - pad)]
self.note = note
self.len = len(note) + 10
def serialize(self, buf, offset):
note = self.note
extra = None
extra_len = len(self.note) - 6
if extra_len > 0:
extra = note[6:]
note = note[0:6]
msg_pack_into(ofproto.NX_ACTION_NOTE_PACK_STR, buf,
offset, self.type, self.len, self.vendor, self.subtype,
*note)
if extra_len > 0:
msg_pack_into('B' * extra_len, buf,
offset + ofproto.NX_ACTION_NOTE_SIZE,
*extra)
@classmethod
def parser(cls, buf, offset):
note = struct.unpack_from(
ofproto.NX_ACTION_NOTE_PACK_STR, buf, offset)
(type_, len_, vendor, subtype) = note[0:4]
note = [i for i in note[4:]]
if len_ > ofproto.NX_ACTION_NOTE_SIZE:
note_start = offset + ofproto.NX_ACTION_NOTE_SIZE
note_end = note_start + len_ - ofproto.NX_ACTION_NOTE_SIZE
note += [int(binascii.b2a_hex(i), 16) for i
in buf[note_start:note_end]]
return cls(note)
class NXActionBundleBase(NXActionHeader):
def __init__(self, algorithm, fields, basis, slave_type, n_slaves,
ofs_nbits, dst, slaves):
super(NXActionBundleBase, self).__init__()
_len = ofproto.NX_ACTION_BUNDLE_SIZE + len(slaves) * 2
_len += (_len % 8)
self.len = _len
self.algorithm = algorithm
self.fields = fields
self.basis = basis
self.slave_type = slave_type
self.n_slaves = n_slaves
self.ofs_nbits = ofs_nbits
self.dst = dst
self.slaves = slaves
def serialize(self, buf, offset):
slave_offset = offset + ofproto.NX_ACTION_BUNDLE_SIZE
for s in self.slaves:
msg_pack_into('!H', buf, slave_offset, s)
slave_offset += 2
pad_len = (len(self.slaves) * 2 +
ofproto.NX_ACTION_BUNDLE_SIZE) % 8
if pad_len != 0:
msg_pack_into('%dx' % pad_len, buf, slave_offset)
msg_pack_into(ofproto.NX_ACTION_BUNDLE_PACK_STR, buf,
offset, self.type, self.len, self.vendor, self.subtype,
self.algorithm, self.fields, self.basis,
self.slave_type, self.n_slaves,
self.ofs_nbits, self.dst)
@classmethod
def parser(cls, action_cls, buf, offset):
(type_, len_, vendor, subtype, algorithm, fields, basis,
slave_type, n_slaves, ofs_nbits, dst) = struct.unpack_from(
ofproto.NX_ACTION_BUNDLE_PACK_STR, buf, offset)
slave_offset = offset + ofproto.NX_ACTION_BUNDLE_SIZE
slaves = []
for i in range(0, n_slaves):
s = struct.unpack_from('!H', buf, slave_offset)
slaves.append(s[0])
slave_offset += 2
return action_cls(algorithm, fields, basis, slave_type,
n_slaves, ofs_nbits, dst, slaves)
@NXActionHeader.register_nx_action_subtype(ofproto.NXAST_BUNDLE, 0)
class NXActionBundle(NXActionBundleBase):
def __init__(self, algorithm, fields, basis, slave_type, n_slaves,
ofs_nbits, dst, slaves):
super(NXActionBundle, self).__init__(
algorithm, fields, basis, slave_type, n_slaves,
ofs_nbits, dst, slaves)
@classmethod
def parser(cls, buf, offset):
return NXActionBundleBase.parser(NXActionBundle, buf, offset)
@NXActionHeader.register_nx_action_subtype(ofproto.NXAST_BUNDLE_LOAD, 0)
class NXActionBundleLoad(NXActionBundleBase):
def __init__(self, algorithm, fields, basis, slave_type, n_slaves,
ofs_nbits, dst, slaves):
super(NXActionBundleLoad, self).__init__(
algorithm, fields, basis, slave_type, n_slaves,
ofs_nbits, dst, slaves)
@classmethod
def parser(cls, buf, offset):
return NXActionBundleBase.parser(NXActionBundleLoad, buf, offset)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_AUTOPATH, ofproto.NX_ACTION_AUTOPATH_SIZE)
class NXActionAutopath(NXActionHeader):
def __init__(self, ofs_nbits, dst, id_):
super(NXActionAutopath, self).__init__()
self.ofs_nbits = ofs_nbits
self.dst = dst
self.id = id_
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_AUTOPATH_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype,
self.ofs_nbits, self.dst, self.id)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, ofs_nbits, dst,
id_) = struct.unpack_from(
ofproto.NX_ACTION_AUTOPATH_PACK_STR, buf, offset)
return cls(ofs_nbits, dst, id_)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_OUTPUT_REG, ofproto.NX_ACTION_OUTPUT_REG_SIZE)
class NXActionOutputReg(NXActionHeader):
def __init__(self, ofs_nbits, src, max_len):
super(NXActionOutputReg, self).__init__()
self.ofs_nbits = ofs_nbits
self.src = src
self.max_len = max_len
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_OUTPUT_REG_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype,
self.ofs_nbits, self.src, self.max_len)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, ofs_nbits, src,
max_len) = struct.unpack_from(
ofproto.NX_ACTION_OUTPUT_REG_PACK_STR, buf, offset)
return cls(ofs_nbits, src, max_len)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_EXIT, ofproto.NX_ACTION_HEADER_SIZE)
class NXActionExit(NXActionHeader):
def __init__(self):
super(NXActionExit, self).__init__()
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_HEADER_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype) = struct.unpack_from(
ofproto.NX_ACTION_HEADER_PACK_STR, buf, offset)
return cls()
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_DEC_TTL, ofproto.NX_ACTION_HEADER_SIZE)
class NXActionDecTtl(NXActionHeader):
def __init__(self):
super(NXActionDecTtl, self).__init__()
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_HEADER_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype) = struct.unpack_from(
ofproto.NX_ACTION_HEADER_PACK_STR, buf, offset)
return cls()
@NXActionHeader.register_nx_action_subtype(ofproto.NXAST_LEARN, 0)
class NXActionLearn(NXActionHeader):
def __init__(self, idle_timeout, hard_timeout, priority, cookie, flags,
table_id, fin_idle_timeout, fin_hard_timeout, spec):
super(NXActionLearn, self).__init__()
len_ = len(spec) + ofproto.NX_ACTION_LEARN_SIZE
pad_len = 8 - (len_ % 8)
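        # note: when len_ is already a multiple of 8 this still yields
        # pad_len == 8, i.e. a full extra block of zero padding is appended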
self.len = len_ + pad_len
self.idle_timeout = idle_timeout
self.hard_timeout = hard_timeout
self.priority = priority
self.cookie = cookie
self.flags = flags
self.table_id = table_id
self.fin_idle_timeout = fin_idle_timeout
self.fin_hard_timeout = fin_hard_timeout
self.spec = spec + bytearray('\x00' * pad_len)
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_LEARN_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype,
self.idle_timeout, self.hard_timeout, self.priority,
self.cookie, self.flags, self.table_id,
self.fin_idle_timeout, self.fin_hard_timeout)
buf += self.spec
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, idle_timeout, hard_timeout, priority,
cookie, flags, table_id, fin_idle_timeout,
fin_hard_timeout) = struct.unpack_from(
ofproto.NX_ACTION_LEARN_PACK_STR, buf, offset)
spec = buf[offset + ofproto.NX_ACTION_LEARN_SIZE:]
return cls(idle_timeout, hard_timeout, priority,
cookie, flags, table_id, fin_idle_timeout,
fin_hard_timeout, spec)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_CONTROLLER, ofproto.NX_ACTION_CONTROLLER_SIZE)
class NXActionController(NXActionHeader):
def __init__(self, max_len, controller_id, reason):
super(NXActionController, self).__init__()
self.max_len = max_len
self.controller_id = controller_id
self.reason = reason
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_CONTROLLER_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype,
self.max_len, self.controller_id, self.reason, 0)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, max_len, controller_id, reason,
_zero) = struct.unpack_from(
ofproto.NX_ACTION_CONTROLLER_PACK_STR, buf, offset)
return cls(max_len, controller_id, reason)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_FIN_TIMEOUT, ofproto.NX_ACTION_FIN_TIMEOUT_SIZE)
class NXActionFinTimeout(NXActionHeader):
def __init__(self, fin_idle_timeout, fin_hard_timeout):
super(NXActionFinTimeout, self).__init__()
self.fin_idle_timeout = fin_idle_timeout
self.fin_hard_timeout = fin_hard_timeout
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_FIN_TIMEOUT_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype,
self.fin_idle_timeout, self.fin_hard_timeout)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, fin_idle_timeout,
fin_hard_timeout) = struct.unpack_from(
ofproto.NX_ACTION_FIN_TIMEOUT_PACK_STR, buf, offset)
return cls(fin_idle_timeout, fin_hard_timeout)
class OFPDescStats(ofproto_parser.namedtuple('OFPDescStats', (
'mfr_desc', 'hw_desc', 'sw_desc', 'serial_num', 'dp_desc'))):
_TYPE = {
'ascii': [
'mfr_desc',
'hw_desc',
'sw_desc',
'serial_num',
'dp_desc',
]
}
@classmethod
def parser(cls, buf, offset):
desc = struct.unpack_from(ofproto.OFP_DESC_STATS_PACK_STR,
buf, offset)
desc = list(desc)
desc = map(lambda x: x.rstrip('\0'), desc)
stats = cls(*desc)
stats.length = ofproto.OFP_DESC_STATS_SIZE
return stats
class OFPFlowStats(StringifyMixin):
def __init__(self):
super(OFPFlowStats, self).__init__()
self.length = None
self.table_id = None
self.match = None
self.duration_sec = None
self.duration_nsec = None
self.priority = None
self.idle_timeout = None
self.hard_timeout = None
self.cookie = None
self.packet_count = None
self.byte_count = None
self.actions = None
@classmethod
def parser(cls, buf, offset):
flow_stats = cls()
flow_stats.length, flow_stats.table_id = struct.unpack_from(
ofproto.OFP_FLOW_STATS_0_PACK_STR, buf, offset)
offset += ofproto.OFP_FLOW_STATS_0_SIZE
flow_stats.match = OFPMatch.parse(buf, offset)
offset += ofproto.OFP_MATCH_SIZE
(flow_stats.duration_sec,
flow_stats.duration_nsec,
flow_stats.priority,
flow_stats.idle_timeout,
flow_stats.hard_timeout,
flow_stats.cookie,
flow_stats.packet_count,
flow_stats.byte_count) = struct.unpack_from(
ofproto.OFP_FLOW_STATS_1_PACK_STR, buf, offset)
offset += ofproto.OFP_FLOW_STATS_1_SIZE
flow_stats.actions = []
length = ofproto.OFP_FLOW_STATS_SIZE
while length < flow_stats.length:
action = OFPAction.parser(buf, offset)
flow_stats.actions.append(action)
offset += action.len
length += action.len
return flow_stats
class OFPAggregateStats(ofproto_parser.namedtuple('OFPAggregateStats', (
'packet_count', 'byte_count', 'flow_count'))):
@classmethod
def parser(cls, buf, offset):
agg = struct.unpack_from(
ofproto.OFP_AGGREGATE_STATS_REPLY_PACK_STR, buf, offset)
stats = cls(*agg)
stats.length = ofproto.OFP_AGGREGATE_STATS_REPLY_SIZE
return stats
class OFPTableStats(ofproto_parser.namedtuple('OFPTableStats', (
'table_id', 'name', 'wildcards', 'max_entries', 'active_count',
'lookup_count', 'matched_count'))):
_TYPE = {
'utf-8': [
# OF spec is unclear about the encoding of name.
            # we assume UTF-8.
'name',
]
}
@classmethod
def parser(cls, buf, offset):
tbl = struct.unpack_from(ofproto.OFP_TABLE_STATS_PACK_STR,
buf, offset)
tbl = list(tbl)
i = cls._fields.index('name')
tbl[i] = tbl[i].rstrip('\0')
stats = cls(*tbl)
stats.length = ofproto.OFP_TABLE_STATS_SIZE
return stats
class OFPPortStats(ofproto_parser.namedtuple('OFPPortStats', (
'port_no', 'rx_packets', 'tx_packets', 'rx_bytes', 'tx_bytes',
'rx_dropped', 'tx_dropped', 'rx_errors', 'tx_errors',
'rx_frame_err', 'rx_over_err', 'rx_crc_err', 'collisions'))):
@classmethod
def parser(cls, buf, offset):
port = struct.unpack_from(ofproto.OFP_PORT_STATS_PACK_STR,
buf, offset)
stats = cls(*port)
stats.length = ofproto.OFP_PORT_STATS_SIZE
return stats
class OFPQueueStats(ofproto_parser.namedtuple('OFPQueueStats', (
'port_no', 'queue_id', 'tx_bytes', 'tx_packets', 'tx_errors'))):
@classmethod
def parser(cls, buf, offset):
queue = struct.unpack_from(ofproto.OFP_QUEUE_STATS_PACK_STR,
buf, offset)
stats = cls(*queue)
stats.length = ofproto.OFP_QUEUE_STATS_SIZE
return stats
class OFPVendorStats(ofproto_parser.namedtuple('OFPVendorStats',
('specific_data'))):
@classmethod
def parser(cls, buf, offset):
stats = cls(buf[offset:])
stats.length = len(stats.specific_data)
return stats
class NXFlowStats(StringifyMixin):
def __init__(self):
super(NXFlowStats, self).__init__()
self.length = None
self.table_id = None
self.duration_sec = None
self.duration_nsec = None
self.priority = None
self.idle_timeout = None
self.hard_timeout = None
self.match_len = None
self.idle_age = None
self.hard_age = None
self.cookie = None
self.packet_count = None
self.byte_count = None
@classmethod
def parser(cls, buf, offset):
original_offset = offset
nxflow_stats = cls()
(nxflow_stats.length, nxflow_stats.table_id,
nxflow_stats.duration_sec, nxflow_stats.duration_nsec,
nxflow_stats.priority, nxflow_stats.idle_timeout,
nxflow_stats.hard_timeout, nxflow_stats.match_len,
nxflow_stats.idle_age, nxflow_stats.hard_age,
nxflow_stats.cookie, nxflow_stats.packet_count,
nxflow_stats.byte_count) = struct.unpack_from(
ofproto.NX_FLOW_STATS_PACK_STR, buf, offset)
offset += ofproto.NX_FLOW_STATS_SIZE
fields = []
match_len = nxflow_stats.match_len
match_len -= 4
while match_len > 0:
field = nx_match.MFField.parser(buf, offset)
offset += field.length
match_len -= field.length
fields.append(field)
nxflow_stats.fields = fields
actions = []
total_len = original_offset + nxflow_stats.length
match_len = nxflow_stats.match_len
offset += utils.round_up(match_len, 8) - match_len
while offset < total_len:
action = OFPAction.parser(buf, offset)
actions.append(action)
offset += action.len
nxflow_stats.actions = actions
return nxflow_stats
class NXAggregateStats(ofproto_parser.namedtuple('NXAggregateStats', (
'packet_count', 'byte_count', 'flow_count'))):
@classmethod
def parser(cls, buf, offset):
agg = struct.unpack_from(
ofproto.NX_AGGREGATE_STATS_REPLY_PACK_STR, buf, offset)
stats = cls(*agg)
stats.length = ofproto.NX_AGGREGATE_STATS_REPLY_SIZE
return stats
class OFPQueuePropHeader(StringifyMixin):
_QUEUE_PROPERTIES = {}
@staticmethod
def register_queue_property(prop_type, prop_len):
        def _register_queue_property(cls):
cls.cls_prop_type = prop_type
cls.cls_prop_len = prop_len
OFPQueuePropHeader._QUEUE_PROPERTIES[prop_type] = cls
return cls
        return _register_queue_property
def __init__(self):
self.property = self.cls_prop_type
self.len = self.cls_prop_len
@classmethod
def parser(cls, buf, offset):
property_, len_ = struct.unpack_from(
ofproto.OFP_QUEUE_PROP_HEADER_PACK_STR, buf, offset)
prop_cls = cls._QUEUE_PROPERTIES[property_]
assert property_ == prop_cls.cls_prop_type
assert len_ == prop_cls.cls_prop_len
offset += ofproto.OFP_QUEUE_PROP_HEADER_SIZE
return prop_cls.parser(buf, offset)
@OFPQueuePropHeader.register_queue_property(
ofproto.OFPQT_NONE, ofproto.OFP_QUEUE_PROP_HEADER_SIZE)
class OFPQueuePropNone(OFPQueuePropHeader):
def __init__(self):
super(OFPQueuePropNone, self).__init__()
@classmethod
def parser(cls, buf, offset):
return cls()
@OFPQueuePropHeader.register_queue_property(
ofproto.OFPQT_MIN_RATE, ofproto.OFP_QUEUE_PROP_MIN_RATE_SIZE)
class OFPQueuePropMinRate(OFPQueuePropHeader):
def __init__(self, rate):
super(OFPQueuePropMinRate, self).__init__()
self.rate = rate
@classmethod
def parser(cls, buf, offset):
(rate,) = struct.unpack_from(
ofproto.OFP_QUEUE_PROP_MIN_RATE_PACK_STR,
buf, offset)
return cls(rate)
class OFPPacketQueue(StringifyMixin):
def __init__(self, queue_id, len_):
self.queue_id = queue_id
self.len = len_
self.properties = None
@classmethod
def parser(cls, buf, offset):
queue_id, len_ = struct.unpack_from(
ofproto.OFP_PACKET_QUEUE_PQCK_STR, buf, offset)
packet_queue = cls(queue_id, len_)
packet_queue.properties = []
cur_len = ofproto.OFP_PACKET_QUEUE_SIZE
offset += ofproto.OFP_PACKET_QUEUE_SIZE
while (cur_len + ofproto.OFP_QUEUE_PROP_HEADER_SIZE <=
packet_queue.len):
prop = OFPQueuePropHeader.parser(buf, offset)
packet_queue.properties.append(prop)
cur_len += prop.len
offset += prop.len
return packet_queue
#
# Symmetric messages
# parser + serializer
#
@_register_parser
@_set_msg_type(ofproto.OFPT_HELLO)
class OFPHello(MsgBase):
def __init__(self, datapath):
super(OFPHello, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_ERROR)
class OFPErrorMsg(MsgBase):
def __init__(self, datapath, type_=None, code=None, data=None):
super(OFPErrorMsg, self).__init__(datapath)
self.type = type_
self.code = code
self.data = data
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPErrorMsg, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
msg.type, msg.code = struct.unpack_from(
ofproto.OFP_ERROR_MSG_PACK_STR, msg.buf,
ofproto.OFP_HEADER_SIZE)
msg.data = msg.buf[ofproto.OFP_ERROR_MSG_SIZE:]
return msg
def _serialize_body(self):
assert self.data is not None
msg_pack_into(ofproto.OFP_ERROR_MSG_PACK_STR, self.buf,
ofproto.OFP_HEADER_SIZE, self.type, self.code)
self.buf += self.data
@_register_parser
@_set_msg_type(ofproto.OFPT_ECHO_REQUEST)
class OFPEchoRequest(MsgBase):
def __init__(self, datapath, data=None):
super(OFPEchoRequest, self).__init__(datapath)
self.data = data
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPEchoRequest, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
msg.data = msg.buf[ofproto.OFP_HEADER_SIZE:]
return msg
def _serialize_body(self):
if self.data is not None:
self.buf += self.data
@_register_parser
@_set_msg_type(ofproto.OFPT_ECHO_REPLY)
class OFPEchoReply(MsgBase):
def __init__(self, datapath, data=None):
super(OFPEchoReply, self).__init__(datapath)
self.data = data
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPEchoReply, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
msg.data = msg.buf[ofproto.OFP_HEADER_SIZE:]
return msg
def _serialize_body(self):
assert self.data is not None
self.buf += self.data
@_register_parser
@_set_msg_type(ofproto.OFPT_VENDOR)
class OFPVendor(MsgBase):
_VENDORS = {}
@staticmethod
def register_vendor(id_):
def _register_vendor(cls):
OFPVendor._VENDORS[id_] = cls
return cls
return _register_vendor
def __init__(self, datapath):
super(OFPVendor, self).__init__(datapath)
self.data = None
self.vendor = None
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPVendor, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
(msg.vendor,) = struct.unpack_from(
ofproto.OFP_VENDOR_HEADER_PACK_STR, msg.buf,
ofproto.OFP_HEADER_SIZE)
cls_ = cls._VENDORS.get(msg.vendor)
if cls_:
msg.data = cls_.parser(datapath, msg.buf, 0)
else:
msg.data = msg.buf[ofproto.OFP_VENDOR_HEADER_SIZE:]
return msg
def serialize_header(self):
msg_pack_into(ofproto.OFP_VENDOR_HEADER_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE, self.vendor)
def _serialize_body(self):
assert self.data is not None
self.serialize_header()
self.buf += self.data
@OFPVendor.register_vendor(ofproto.NX_VENDOR_ID)
class NiciraHeader(OFPVendor):
_NX_SUBTYPES = {}
@staticmethod
def register_nx_subtype(subtype):
def _register_nx_subtype(cls):
cls.cls_subtype = subtype
NiciraHeader._NX_SUBTYPES[cls.cls_subtype] = cls
return cls
return _register_nx_subtype
def __init__(self, datapath, subtype):
super(NiciraHeader, self).__init__(datapath)
self.vendor = ofproto.NX_VENDOR_ID
self.subtype = subtype
def serialize_header(self):
super(NiciraHeader, self).serialize_header()
msg_pack_into(ofproto.NICIRA_HEADER_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE,
self.vendor, self.subtype)
@classmethod
def parser(cls, datapath, buf, offset):
vendor, subtype = struct.unpack_from(
ofproto.NICIRA_HEADER_PACK_STR, buf,
offset + ofproto.OFP_HEADER_SIZE)
cls_ = cls._NX_SUBTYPES.get(subtype)
return cls_.parser(datapath, buf,
offset + ofproto.NICIRA_HEADER_SIZE)
class NXTSetFlowFormat(NiciraHeader):
def __init__(self, datapath, flow_format):
super(NXTSetFlowFormat, self).__init__(
datapath, ofproto.NXT_SET_FLOW_FORMAT)
self.format = flow_format
def _serialize_body(self):
self.serialize_header()
msg_pack_into(ofproto.NX_SET_FLOW_FORMAT_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE, self.format)
class NXTFlowMod(NiciraHeader):
def __init__(self, datapath, cookie, command,
idle_timeout=0, hard_timeout=0,
priority=ofproto.OFP_DEFAULT_PRIORITY,
buffer_id=0xffffffff, out_port=ofproto.OFPP_NONE,
flags=0, rule=None, actions=None):
        # The `rule` argument is placed next to last to mirror the layout
        # of struct nxt_flow_mod.  Although a rule must be given, it
        # defaults to None so that the arguments placed before it can keep
        # their own default values.
assert rule is not None
if actions is None:
actions = []
super(NXTFlowMod, self).__init__(datapath, ofproto.NXT_FLOW_MOD)
self.cookie = cookie
self.command = command
self.idle_timeout = idle_timeout
self.hard_timeout = hard_timeout
self.priority = priority
self.buffer_id = buffer_id
self.out_port = out_port
self.flags = flags
self.rule = rule
self.actions = actions
def _serialize_body(self):
self.serialize_header()
offset = ofproto.NX_FLOW_MOD_SIZE
match_len = nx_match.serialize_nxm_match(self.rule, self.buf, offset)
offset += nx_match.round_up(match_len)
msg_pack_into(ofproto.NX_FLOW_MOD_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE,
self.cookie, self.command, self.idle_timeout,
self.hard_timeout, self.priority, self.buffer_id,
self.out_port, self.flags, match_len)
if self.actions is not None:
for a in self.actions:
a.serialize(self.buf, offset)
offset += a.len
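# A minimal NXTFlowMod usage sketch (assumes a connected datapath `dp` and
# the ClsRule match builder from ryu.ofproto.nx_match):
#
#   rule = nx_match.ClsRule()
#   rule.set_in_port(1)
#   flow_mod = NXTFlowMod(dp, cookie=0, command=ofproto.OFPFC_ADD,
#                         rule=rule, actions=[OFPActionOutput(2)])
#   dp.send_msg(flow_mod)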
class NXTRoleRequest(NiciraHeader):
def __init__(self, datapath, role):
super(NXTRoleRequest, self).__init__(
datapath, ofproto.NXT_ROLE_REQUEST)
self.role = role
def _serialize_body(self):
self.serialize_header()
msg_pack_into(ofproto.NX_ROLE_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE, self.role)
@NiciraHeader.register_nx_subtype(ofproto.NXT_ROLE_REPLY)
class NXTRoleReply(NiciraHeader):
def __init__(self, datapath, role):
super(NXTRoleReply, self).__init__(
datapath, ofproto.NXT_ROLE_REPLY)
self.role = role
@classmethod
def parser(cls, datapath, buf, offset):
(role,) = struct.unpack_from(
ofproto.NX_ROLE_PACK_STR, buf, offset)
return cls(datapath, role)
class NXTFlowModTableId(NiciraHeader):
def __init__(self, datapath, set_):
super(NXTFlowModTableId, self).__init__(
datapath, ofproto.NXT_FLOW_MOD_TABLE_ID)
self.set = set_
def _serialize_body(self):
self.serialize_header()
msg_pack_into(ofproto.NX_FLOW_MOD_TABLE_ID_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE,
self.set)
@NiciraHeader.register_nx_subtype(ofproto.NXT_FLOW_REMOVED)
class NXTFlowRemoved(NiciraHeader):
def __init__(self, datapath, cookie, priority, reason,
duration_sec, duration_nsec, idle_timeout, match_len,
packet_count, byte_count, match):
super(NXTFlowRemoved, self).__init__(
datapath, ofproto.NXT_FLOW_REMOVED)
self.cookie = cookie
self.priority = priority
self.reason = reason
self.duration_sec = duration_sec
self.duration_nsec = duration_nsec
self.idle_timeout = idle_timeout
self.match_len = match_len
self.packet_count = packet_count
self.byte_count = byte_count
self.match = match
@classmethod
def parser(cls, datapath, buf, offset):
(cookie, priority, reason, duration_sec, duration_nsec,
idle_timeout, match_len,
packet_count, byte_count) = struct.unpack_from(
ofproto.NX_FLOW_REMOVED_PACK_STR, buf, offset)
offset += (ofproto.NX_FLOW_REMOVED_SIZE
- ofproto.NICIRA_HEADER_SIZE)
match = nx_match.NXMatch.parser(buf, offset, match_len)
return cls(datapath, cookie, priority, reason, duration_sec,
duration_nsec, idle_timeout, match_len, packet_count,
byte_count, match)
class NXTSetPacketInFormat(NiciraHeader):
def __init__(self, datapath, packet_in_format):
super(NXTSetPacketInFormat, self).__init__(
datapath, ofproto.NXT_SET_PACKET_IN_FORMAT)
self.format = packet_in_format
def _serialize_body(self):
self.serialize_header()
msg_pack_into(ofproto.NX_SET_PACKET_IN_FORMAT_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE,
self.format)
@NiciraHeader.register_nx_subtype(ofproto.NXT_PACKET_IN)
class NXTPacketIn(NiciraHeader):
def __init__(self, datapath, buffer_id, total_len, reason, table_id,
cookie, match_len, match, frame):
super(NXTPacketIn, self).__init__(
datapath, ofproto.NXT_PACKET_IN)
self.buffer_id = buffer_id
self.total_len = total_len
self.reason = reason
self.table_id = table_id
self.cookie = cookie
self.match_len = match_len
self.match = match
self.frame = frame
@classmethod
def parser(cls, datapath, buf, offset):
(buffer_id, total_len, reason, table_id,
cookie, match_len) = struct.unpack_from(
ofproto.NX_PACKET_IN_PACK_STR, buf, offset)
offset += (ofproto.NX_PACKET_IN_SIZE
- ofproto.NICIRA_HEADER_SIZE)
match = nx_match.NXMatch.parser(buf, offset, match_len)
offset += (match_len + 7) / 8 * 8
frame = buf[offset:]
if total_len < len(frame):
frame = frame[:total_len]
return cls(datapath, buffer_id, total_len, reason, table_id,
cookie, match_len, match, frame)
class NXTFlowAge(NiciraHeader):
def __init__(self, datapath):
super(NXTFlowAge, self).__init__(
datapath, ofproto.NXT_FLOW_AGE)
def _serialize_body(self):
self.serialize_header()
class NXTSetAsyncConfig(NiciraHeader):
def __init__(self, datapath, packet_in_mask, port_status_mask,
flow_removed_mask):
super(NXTSetAsyncConfig, self).__init__(
datapath, ofproto.NXT_SET_ASYNC_CONFIG)
self.packet_in_mask = packet_in_mask
self.port_status_mask = port_status_mask
self.flow_removed_mask = flow_removed_mask
def _serialize_body(self):
self.serialize_header()
msg_pack_into(ofproto.NX_ASYNC_CONFIG_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE,
self.packet_in_mask[0], self.packet_in_mask[1],
self.port_status_mask[0], self.port_status_mask[1],
self.flow_removed_mask[0], self.flow_removed_mask[1])
class NXTSetControllerId(NiciraHeader):
def __init__(self, datapath, controller_id):
super(NXTSetControllerId, self).__init__(
datapath, ofproto.NXT_SET_CONTROLLER_ID)
self.controller_id = controller_id
def _serialize_body(self):
self.serialize_header()
msg_pack_into(ofproto.NX_CONTROLLER_ID_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE,
self.controller_id)
#
# asymmetric message (datapath -> controller)
# parser only
#
@_register_parser
@_set_msg_type(ofproto.OFPT_FEATURES_REPLY)
class OFPSwitchFeatures(MsgBase):
def __init__(self, datapath, datapath_id=None, n_buffers=None,
n_tables=None, capabilities=None, actions=None, ports=None):
super(OFPSwitchFeatures, self).__init__(datapath)
self.datapath_id = datapath_id
self.n_buffers = n_buffers
self.n_tables = n_tables
self.capabilities = capabilities
self.actions = actions
self.ports = ports
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPSwitchFeatures, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
(msg.datapath_id,
msg.n_buffers,
msg.n_tables,
msg.capabilities,
msg.actions) = struct.unpack_from(
ofproto.OFP_SWITCH_FEATURES_PACK_STR, msg.buf,
ofproto.OFP_HEADER_SIZE)
msg.ports = {}
n_ports = ((msg_len - ofproto.OFP_SWITCH_FEATURES_SIZE) /
ofproto.OFP_PHY_PORT_SIZE)
offset = ofproto.OFP_SWITCH_FEATURES_SIZE
for _i in range(n_ports):
port = OFPPhyPort.parser(msg.buf, offset)
# print 'port = %s' % str(port)
msg.ports[port.port_no] = port
offset += ofproto.OFP_PHY_PORT_SIZE
return msg
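# Handler-side sketch (assumes a Ryu app with
# `from ryu.controller.handler import set_ev_cls, CONFIG_DISPATCHER` and
# `from ryu.controller import ofp_event`): the parsed message exposes its
# ports as a dict keyed by port_no, e.g.
#
#   @set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
#   def features_handler(self, ev):
#       for port_no, port in ev.msg.ports.items():
#           self.logger.info('port %d: %s', port_no, port.name)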
@_register_parser
@_set_msg_type(ofproto.OFPT_PORT_STATUS)
class OFPPortStatus(MsgBase):
def __init__(self, datapath, reason=None, desc=None):
super(OFPPortStatus, self).__init__(datapath)
self.reason = reason
self.desc = desc
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPPortStatus, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
msg.reason = struct.unpack_from(
ofproto.OFP_PORT_STATUS_PACK_STR,
msg.buf, ofproto.OFP_HEADER_SIZE)[0]
msg.desc = OFPPhyPort.parser(msg.buf,
ofproto.OFP_PORT_STATUS_DESC_OFFSET)
return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_PACKET_IN)
class OFPPacketIn(MsgBase):
def __init__(self, datapath, buffer_id=None, total_len=None, in_port=None,
reason=None, data=None):
super(OFPPacketIn, self).__init__(datapath)
self.buffer_id = buffer_id
self.total_len = total_len
self.in_port = in_port
self.reason = reason
self.data = data
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPPacketIn, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
(msg.buffer_id,
msg.total_len,
msg.in_port,
msg.reason) = struct.unpack_from(
ofproto.OFP_PACKET_IN_PACK_STR,
msg.buf, ofproto.OFP_HEADER_SIZE)
msg.data = msg.buf[ofproto.OFP_PACKET_IN_SIZE:]
if msg.total_len < len(msg.data):
# discard padding for 8-byte alignment of OFP packet
msg.data = msg.data[:msg.total_len]
return msg
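# Handler-side sketch (assumes ryu.lib.packet is available): the raw frame
# carried in msg.data can be decoded with the packet library, e.g.
#
#   from ryu.lib.packet import packet
#   pkt = packet.Packet(msg.data)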
@_register_parser
@_set_msg_type(ofproto.OFPT_GET_CONFIG_REPLY)
class OFPGetConfigReply(MsgBase):
def __init__(self, datapath):
super(OFPGetConfigReply, self).__init__(datapath)
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPGetConfigReply, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
(msg.flags, msg.miss_send_len) = struct.unpack_from(
ofproto.OFP_SWITCH_CONFIG_PACK_STR,
msg.buf, ofproto.OFP_HEADER_SIZE)
return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_BARRIER_REPLY)
class OFPBarrierReply(MsgBase):
def __init__(self, datapath):
super(OFPBarrierReply, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_FLOW_REMOVED)
class OFPFlowRemoved(MsgBase):
def __init__(self, datapath):
super(OFPFlowRemoved, self).__init__(datapath)
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPFlowRemoved, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
msg.match = OFPMatch.parse(msg.buf, ofproto.OFP_HEADER_SIZE)
(msg.cookie,
msg.priority,
msg.reason,
msg.duration_sec,
msg.duration_nsec,
msg.idle_timeout,
msg.packet_count,
msg.byte_count) = struct.unpack_from(
ofproto.OFP_FLOW_REMOVED_PACK_STR0, msg.buf,
ofproto.OFP_HEADER_SIZE + ofproto.OFP_MATCH_SIZE)
return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_QUEUE_GET_CONFIG_REPLY)
class OFPQueueGetConfigReply(MsgBase):
def __init__(self, datapath):
super(OFPQueueGetConfigReply, self).__init__(datapath)
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPQueueGetConfigReply, cls).parser(
datapath, version, msg_type, msg_len, xid, buf)
offset = ofproto.OFP_HEADER_SIZE
(msg.port,) = struct.unpack_from(
ofproto.OFP_QUEUE_GET_CONFIG_REPLY_PACK_STR, msg.buf, offset)
msg.queues = []
offset = ofproto.OFP_QUEUE_GET_CONFIG_REPLY_SIZE
while offset + ofproto.OFP_PACKET_QUEUE_SIZE <= msg_len:
queue = OFPPacketQueue.parser(msg.buf, offset)
msg.queues.append(queue)
offset += queue.len
return msg
def _set_stats_type(stats_type, stats_body_cls):
def _set_cls_stats_type(cls):
cls.cls_stats_type = stats_type
cls.cls_stats_body_cls = stats_body_cls
return cls
return _set_cls_stats_type
@_register_parser
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPStatsReply(MsgBase):
_STATS_MSG_TYPES = {}
@staticmethod
def register_stats_type(body_single_struct=False):
def _register_stats_type(cls):
assert cls.cls_stats_type is not None
assert cls.cls_stats_type not in OFPStatsReply._STATS_MSG_TYPES
assert cls.cls_stats_body_cls is not None
cls.cls_body_single_struct = body_single_struct
OFPStatsReply._STATS_MSG_TYPES[cls.cls_stats_type] = cls
return cls
return _register_stats_type
def __init__(self, datapath):
super(OFPStatsReply, self).__init__(datapath)
self.type = None
self.flags = None
self.body = None
@classmethod
def parser_stats_body(cls, buf, msg_len, offset):
body_cls = cls.cls_stats_body_cls
body = []
while offset < msg_len:
entry = body_cls.parser(buf, offset)
body.append(entry)
offset += entry.length
if cls.cls_body_single_struct:
return body[0]
return body
@classmethod
def parser_stats(cls, datapath, version, msg_type, msg_len, xid, buf):
# call MsgBase::parser, not OFPStatsReply::parser
msg = MsgBase.parser.__func__(
cls, datapath, version, msg_type, msg_len, xid, buf)
msg.body = msg.parser_stats_body(msg.buf, msg.msg_len,
ofproto.OFP_STATS_MSG_SIZE)
return msg
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
type_, flags = struct.unpack_from(ofproto.OFP_STATS_MSG_PACK_STR,
buffer(buf),
ofproto.OFP_HEADER_SIZE)
stats_type_cls = cls._STATS_MSG_TYPES.get(type_)
msg = stats_type_cls.parser_stats(
datapath, version, msg_type, msg_len, xid, buf)
msg.type = type_
msg.flags = flags
return msg
@OFPStatsReply.register_stats_type(body_single_struct=True)
@_set_stats_type(ofproto.OFPST_DESC, OFPDescStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPDescStatsReply(OFPStatsReply):
def __init__(self, datapath):
super(OFPDescStatsReply, self).__init__(datapath)
@OFPStatsReply.register_stats_type()
@_set_stats_type(ofproto.OFPST_FLOW, OFPFlowStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPFlowStatsReply(OFPStatsReply):
def __init__(self, datapath):
super(OFPFlowStatsReply, self).__init__(datapath)
@OFPStatsReply.register_stats_type()
@_set_stats_type(ofproto.OFPST_AGGREGATE, OFPAggregateStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPAggregateStatsReply(OFPStatsReply):
def __init__(self, datapath):
super(OFPAggregateStatsReply, self).__init__(datapath)
@OFPStatsReply.register_stats_type()
@_set_stats_type(ofproto.OFPST_TABLE, OFPTableStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPTableStatsReply(OFPStatsReply):
def __init__(self, datapath):
super(OFPTableStatsReply, self).__init__(datapath)
@OFPStatsReply.register_stats_type()
@_set_stats_type(ofproto.OFPST_PORT, OFPPortStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPPortStatsReply(OFPStatsReply):
def __init__(self, datapath):
super(OFPPortStatsReply, self).__init__(datapath)
@OFPStatsReply.register_stats_type()
@_set_stats_type(ofproto.OFPST_QUEUE, OFPQueueStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPQueueStatsReply(OFPStatsReply):
def __init__(self, datapath):
super(OFPQueueStatsReply, self).__init__(datapath)
@OFPStatsReply.register_stats_type()
@_set_stats_type(ofproto.OFPST_VENDOR, OFPVendorStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPVendorStatsReply(OFPStatsReply):
_STATS_VENDORS = {}
@staticmethod
def register_stats_vendor(vendor):
def _register_stats_vendor(cls):
cls.cls_vendor = vendor
OFPVendorStatsReply._STATS_VENDORS[cls.cls_vendor] = cls
return cls
return _register_stats_vendor
def __init__(self, datapath):
super(OFPVendorStatsReply, self).__init__(datapath)
@classmethod
def parser_stats(cls, datapath, version, msg_type, msg_len, xid,
buf):
(type_,) = struct.unpack_from(
ofproto.OFP_VENDOR_STATS_MSG_PACK_STR, buffer(buf),
ofproto.OFP_STATS_MSG_SIZE)
cls_ = cls._STATS_VENDORS.get(type_)
if cls_ is None:
msg = MsgBase.parser.__func__(
cls, datapath, version, msg_type, msg_len, xid, buf)
body_cls = cls.cls_stats_body_cls
body = body_cls.parser(buf,
ofproto.OFP_STATS_MSG_SIZE)
msg.body = body
return msg
return cls_.parser(
datapath, version, msg_type, msg_len, xid, buf,
ofproto.OFP_VENDOR_STATS_MSG_SIZE)
@OFPVendorStatsReply.register_stats_vendor(ofproto.NX_VENDOR_ID)
class NXStatsReply(OFPStatsReply):
_NX_STATS_TYPES = {}
@staticmethod
def register_nx_stats_type(body_single_struct=False):
def _register_nx_stats_type(cls):
assert cls.cls_stats_type is not None
assert cls.cls_stats_type not in \
NXStatsReply._NX_STATS_TYPES
assert cls.cls_stats_body_cls is not None
cls.cls_body_single_struct = body_single_struct
NXStatsReply._NX_STATS_TYPES[cls.cls_stats_type] = cls
return cls
return _register_nx_stats_type
@classmethod
def parser_stats_body(cls, buf, msg_len, offset):
body_cls = cls.cls_stats_body_cls
body = []
while offset < msg_len:
entry = body_cls.parser(buf, offset)
body.append(entry)
offset += entry.length
if cls.cls_body_single_struct:
return body[0]
return body
@classmethod
def parser_stats(cls, datapath, version, msg_type, msg_len, xid,
buf, offset):
msg = MsgBase.parser.__func__(
cls, datapath, version, msg_type, msg_len, xid, buf)
msg.body = msg.parser_stats_body(msg.buf, msg.msg_len, offset)
return msg
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf,
offset):
(type_,) = struct.unpack_from(
ofproto.NX_STATS_MSG_PACK_STR, buffer(buf), offset)
offset += ofproto.NX_STATS_MSG0_SIZE
cls_ = cls._NX_STATS_TYPES.get(type_)
msg = cls_.parser_stats(
datapath, version, msg_type, msg_len, xid, buf, offset)
return msg
@NXStatsReply.register_nx_stats_type()
@_set_stats_type(ofproto.NXST_FLOW, NXFlowStats)
class NXFlowStatsReply(NXStatsReply):
def __init__(self, datapath):
super(NXFlowStatsReply, self).__init__(datapath)
@NXStatsReply.register_nx_stats_type()
@_set_stats_type(ofproto.NXST_AGGREGATE, NXAggregateStats)
class NXAggregateStatsReply(NXStatsReply):
def __init__(self, datapath):
super(NXAggregateStatsReply, self).__init__(datapath)
#
# controller-to-switch message
# serializer only
#
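# A minimal sketch of the serializer-only flow (datapath is assumed to be a
# connected ryu Datapath object; _serialize_body() runs when the message is
# sent):
#
#     req = datapath.ofproto_parser.OFPFeaturesRequest(datapath)
#     datapath.send_msg(req)  # the OFPSwitchFeatures reply arrives as an event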
@_set_msg_reply(OFPSwitchFeatures)
@_set_msg_type(ofproto.OFPT_FEATURES_REQUEST)
class OFPFeaturesRequest(MsgBase):
def __init__(self, datapath):
super(OFPFeaturesRequest, self).__init__(datapath)
@_set_msg_type(ofproto.OFPT_GET_CONFIG_REQUEST)
class OFPGetConfigRequest(MsgBase):
def __init__(self, datapath):
super(OFPGetConfigRequest, self).__init__(datapath)
@_set_msg_type(ofproto.OFPT_SET_CONFIG)
class OFPSetConfig(MsgBase):
def __init__(self, datapath, flags=None, miss_send_len=None):
super(OFPSetConfig, self).__init__(datapath)
self.flags = flags
self.miss_send_len = miss_send_len
def _serialize_body(self):
assert self.flags is not None
assert self.miss_send_len is not None
msg_pack_into(ofproto.OFP_SWITCH_CONFIG_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE,
self.flags, self.miss_send_len)
@_set_msg_type(ofproto.OFPT_PACKET_OUT)
class OFPPacketOut(MsgBase):
def __init__(self, datapath, buffer_id=None, in_port=None, actions=None,
data=None):
super(OFPPacketOut, self).__init__(datapath)
self.buffer_id = buffer_id
self.in_port = in_port
self._actions_len = None
self.actions = actions
self.data = data
def _serialize_body(self):
assert self.buffer_id is not None
assert self.in_port is not None
assert self.actions is not None
self._actions_len = 0
offset = ofproto.OFP_PACKET_OUT_SIZE
for a in self.actions:
a.serialize(self.buf, offset)
offset += a.len
self._actions_len += a.len
if self.data is not None:
assert self.buffer_id == 0xffffffff
self.buf += self.data
msg_pack_into(ofproto.OFP_PACKET_OUT_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE,
self.buffer_id, self.in_port, self._actions_len)
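# A hedged usage sketch: flooding a raw ethernet frame held in `data`.
# buffer_id=0xffffffff means the frame is carried in this message rather
# than referenced from the switch buffer (see the assert above):
#
#     actions = [OFPActionOutput(ofproto.OFPP_FLOOD)]
#     out = OFPPacketOut(datapath, buffer_id=0xffffffff,
#                        in_port=ofproto.OFPP_NONE, actions=actions, data=data)
#     datapath.send_msg(out)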
@_set_msg_type(ofproto.OFPT_FLOW_MOD)
class OFPFlowMod(MsgBase):
def __init__(self, datapath, match, cookie, command,
idle_timeout=0, hard_timeout=0,
priority=ofproto.OFP_DEFAULT_PRIORITY,
buffer_id=0xffffffff, out_port=ofproto.OFPP_NONE,
flags=0, actions=None):
if actions is None:
actions = []
super(OFPFlowMod, self).__init__(datapath)
self.match = match
self.cookie = cookie
self.command = command
self.idle_timeout = idle_timeout
self.hard_timeout = hard_timeout
self.priority = priority
self.buffer_id = buffer_id
self.out_port = out_port
self.flags = flags
self.actions = actions
def _serialize_body(self):
offset = ofproto.OFP_HEADER_SIZE
self.match.serialize(self.buf, offset)
offset += ofproto.OFP_MATCH_SIZE
msg_pack_into(ofproto.OFP_FLOW_MOD_PACK_STR0, self.buf, offset,
self.cookie, self.command,
self.idle_timeout, self.hard_timeout,
self.priority, self.buffer_id, self.out_port,
self.flags)
offset = ofproto.OFP_FLOW_MOD_SIZE
if self.actions is not None:
for a in self.actions:
a.serialize(self.buf, offset)
offset += a.len
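# A hedged usage sketch: installing a flow that forwards matched packets out
# of port 2 (`match` would be an OFPMatch built elsewhere in this module):
#
#     mod = OFPFlowMod(datapath, match=match, cookie=0,
#                      command=ofproto.OFPFC_ADD, idle_timeout=10,
#                      hard_timeout=30, actions=[OFPActionOutput(2)])
#     datapath.send_msg(mod)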
@_set_msg_type(ofproto.OFPT_PORT_MOD)
class OFPPortMod(MsgBase):
_TYPE = {
'ascii': [
'hw_addr',
]
}
def __init__(self, datapath, port_no=0, hw_addr='00:00:00:00:00:00',
config=0, mask=0, advertise=0):
super(OFPPortMod, self).__init__(datapath)
self.port_no = port_no
self.hw_addr = hw_addr
self.config = config
self.mask = mask
self.advertise = advertise
def _serialize_body(self):
msg_pack_into(ofproto.OFP_PORT_MOD_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE,
self.port_no, addrconv.mac.text_to_bin(self.hw_addr),
self.config, self.mask, self.advertise)
@_set_msg_reply(OFPBarrierReply)
@_set_msg_type(ofproto.OFPT_BARRIER_REQUEST)
class OFPBarrierRequest(MsgBase):
def __init__(self, datapath):
super(OFPBarrierRequest, self).__init__(datapath)
@_set_msg_reply(OFPQueueGetConfigReply)
@_set_msg_type(ofproto.OFPT_QUEUE_GET_CONFIG_REQUEST)
class OFPQueueGetConfigRequest(MsgBase):
def __init__(self, datapath, port):
super(OFPQueueGetConfigRequest, self).__init__(datapath)
self.port = port
def _serialize_body(self):
msg_pack_into(ofproto.OFP_QUEUE_GET_CONFIG_REQUEST_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE, self.port)
class OFPStatsRequest(MsgBase):
def __init__(self, datapath, flags):
assert flags == 0 # none yet defined
super(OFPStatsRequest, self).__init__(datapath)
self.type = self.__class__.cls_stats_type
self.flags = flags
def _serialize_stats_body(self):
pass
def _serialize_body(self):
msg_pack_into(ofproto.OFP_STATS_MSG_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE,
self.type, self.flags)
self._serialize_stats_body()
@_set_msg_reply(OFPDescStatsReply)
@_set_stats_type(ofproto.OFPST_DESC, OFPDescStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPDescStatsRequest(OFPStatsRequest):
def __init__(self, datapath, flags):
super(OFPDescStatsRequest, self).__init__(datapath, flags)
class OFPFlowStatsRequestBase(OFPStatsRequest):
def __init__(self, datapath, flags, match, table_id, out_port):
super(OFPFlowStatsRequestBase, self).__init__(datapath, flags)
self.match = match
self.table_id = table_id
self.out_port = out_port
def _serialize_stats_body(self):
offset = ofproto.OFP_STATS_MSG_SIZE
self.match.serialize(self.buf, offset)
offset += ofproto.OFP_MATCH_SIZE
msg_pack_into(ofproto.OFP_FLOW_STATS_REQUEST_ID_PORT_STR,
self.buf, offset, self.table_id, self.out_port)
@_set_msg_reply(OFPFlowStatsReply)
@_set_stats_type(ofproto.OFPST_FLOW, OFPFlowStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPFlowStatsRequest(OFPFlowStatsRequestBase):
def __init__(self, datapath, flags, match, table_id, out_port):
super(OFPFlowStatsRequest, self).__init__(
datapath, flags, match, table_id, out_port)
@_set_msg_reply(OFPAggregateStatsReply)
@_set_stats_type(ofproto.OFPST_AGGREGATE, OFPAggregateStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPAggregateStatsRequest(OFPFlowStatsRequestBase):
def __init__(self, datapath, flags, match, table_id, out_port):
super(OFPAggregateStatsRequest, self).__init__(
datapath, flags, match, table_id, out_port)
@_set_msg_reply(OFPTableStatsReply)
@_set_stats_type(ofproto.OFPST_TABLE, OFPTableStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPTableStatsRequest(OFPStatsRequest):
def __init__(self, datapath, flags):
super(OFPTableStatsRequest, self).__init__(datapath, flags)
@_set_msg_reply(OFPPortStatsReply)
@_set_stats_type(ofproto.OFPST_PORT, OFPPortStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPPortStatsRequest(OFPStatsRequest):
def __init__(self, datapath, flags, port_no):
super(OFPPortStatsRequest, self).__init__(datapath, flags)
self.port_no = port_no
def _serialize_stats_body(self):
msg_pack_into(ofproto.OFP_PORT_STATS_REQUEST_PACK_STR,
self.buf, ofproto.OFP_STATS_MSG_SIZE, self.port_no)
@_set_msg_reply(OFPQueueStatsReply)
@_set_stats_type(ofproto.OFPST_QUEUE, OFPQueueStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPQueueStatsRequest(OFPStatsRequest):
def __init__(self, datapath, flags, port_no, queue_id):
super(OFPQueueStatsRequest, self).__init__(datapath, flags)
self.port_no = port_no
self.queue_id = queue_id
def _serialize_stats_body(self):
msg_pack_into(ofproto.OFP_QUEUE_STATS_REQUEST_PACK_STR,
self.buf, ofproto.OFP_STATS_MSG_SIZE,
self.port_no, self.queue_id)
@_set_msg_reply(OFPVendorStatsReply)
@_set_stats_type(ofproto.OFPST_VENDOR, OFPVendorStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPVendorStatsRequest(OFPStatsRequest):
def __init__(self, datapath, flags, vendor, specific_data=None):
super(OFPVendorStatsRequest, self).__init__(datapath, flags)
self.vendor = vendor
self.specific_data = specific_data
def _serialize_vendor_stats(self):
self.buf += self.specific_data
def _serialize_stats_body(self):
msg_pack_into(ofproto.OFP_VENDOR_STATS_MSG_PACK_STR,
self.buf, ofproto.OFP_STATS_MSG_SIZE,
self.vendor)
self._serialize_vendor_stats()
class NXStatsRequest(OFPVendorStatsRequest):
def __init__(self, datapath, flags, subtype):
super(NXStatsRequest, self).__init__(datapath, flags,
ofproto.NX_VENDOR_ID)
self.subtype = subtype
def _serialize_vendor_stats_body(self):
pass
def _serialize_vendor_stats(self):
msg_pack_into(ofproto.NX_STATS_MSG_PACK_STR, self.buf,
ofproto.OFP_VENDOR_STATS_MSG_SIZE,
self.subtype)
self._serialize_vendor_stats_body()
class NXFlowStatsRequest(NXStatsRequest):
def __init__(self, datapath, flags, out_port, table_id, rule=None):
super(NXFlowStatsRequest, self).__init__(datapath, flags,
ofproto.NXST_FLOW)
self.out_port = out_port
self.table_id = table_id
self.rule = rule
self.match_len = 0
def _serialize_vendor_stats_body(self):
if self.rule is not None:
offset = ofproto.NX_STATS_MSG_SIZE + \
ofproto.NX_FLOW_STATS_REQUEST_SIZE
self.match_len = nx_match.serialize_nxm_match(
self.rule, self.buf, offset)
msg_pack_into(
ofproto.NX_FLOW_STATS_REQUEST_PACK_STR,
self.buf, ofproto.NX_STATS_MSG_SIZE, self.out_port,
self.match_len, self.table_id)
class NXAggregateStatsRequest(NXStatsRequest):
def __init__(self, datapath, flags, out_port, table_id, rule=None):
super(NXAggregateStatsRequest, self).__init__(
datapath, flags, ofproto.NXST_AGGREGATE)
self.out_port = out_port
self.table_id = table_id
self.rule = rule
self.match_len = 0
def _serialize_vendor_stats_body(self):
if self.rule is not None:
offset = ofproto.NX_STATS_MSG_SIZE + \
ofproto.NX_AGGREGATE_STATS_REQUEST_SIZE
self.match_len = nx_match.serialize_nxm_match(
self.rule, self.buf, offset)
msg_pack_into(
ofproto.NX_AGGREGATE_STATS_REQUEST_PACK_STR,
self.buf, ofproto.NX_STATS_MSG_SIZE, self.out_port,
self.match_len, self.table_id)
| apache-2.0 |
c0hen/django-venv | lib/python3.4/site-packages/pip/utils/build.py | 899 | 1312 | from __future__ import absolute_import
import os.path
import tempfile
from pip.utils import rmtree
class BuildDirectory(object):
def __init__(self, name=None, delete=None):
# If we were not given an explicit directory, and we were not given an
# explicit delete option, then we'll default to deleting.
if name is None and delete is None:
delete = True
if name is None:
# We realpath here because some systems have their default tmpdir
# symlinked to another directory. This tends to confuse build
# scripts, so we canonicalize the path by traversing potential
# symlinks here.
name = os.path.realpath(tempfile.mkdtemp(prefix="pip-build-"))
            # The directory was generated above, so default to deleting it on
            # cleanup unless the caller explicitly asked to keep it.
if delete is None:
delete = True
self.name = name
self.delete = delete
def __repr__(self):
return "<{} {!r}>".format(self.__class__.__name__, self.name)
def __enter__(self):
return self.name
def __exit__(self, exc, value, tb):
self.cleanup()
def cleanup(self):
if self.delete:
rmtree(self.name)
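# A minimal usage sketch; the temporary directory is created in __init__ and
# removed on exit unless delete=False (or an explicit name with delete unset)
# is passed:
#
#     with BuildDirectory() as build_dir:
#         pass  # unpack sources and run the build inside build_dir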
| gpl-3.0 |
wkentaro/docopt | examples/options_example.py | 18 | 1597 | """Example of program with many options using docopt.
Usage:
options_example.py [-hvqrf NAME] [--exclude=PATTERNS]
[--select=ERRORS | --ignore=ERRORS] [--show-source]
[--statistics] [--count] [--benchmark] PATH...
options_example.py (--doctest | --testsuite=DIR)
options_example.py --version
Arguments:
PATH destination path
Options:
-h --help show this help message and exit
--version show version and exit
-v --verbose print status messages
-q --quiet report only file names
-r --repeat show all occurrences of the same error
--exclude=PATTERNS exclude files or directories which match these comma
separated patterns [default: .svn,CVS,.bzr,.hg,.git]
-f NAME --file=NAME when parsing directories, only check filenames matching
these comma separated patterns [default: *.py]
--select=ERRORS select errors and warnings (e.g. E,W6)
--ignore=ERRORS skip errors and warnings (e.g. E4,W)
--show-source show source code for each error
--statistics count errors and warnings
--count print total number of errors and warnings to standard
error and set exit code to 1 if total is not null
--benchmark measure processing speed
--testsuite=DIR run regression tests from dir
--doctest run doctest on myself
"""
from docopt import docopt
if __name__ == '__main__':
arguments = docopt(__doc__, version='1.0.0rc2')
print(arguments)
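    # Sample invocation (hypothetical flags and path):
    #   $ python options_example.py -v --count --exclude=build src/
    # prints the parsed argument dict, e.g. {'--benchmark': False, ...}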
| mit |
bingosummer/azure-linux-extensions | DSC/azure/__init__.py | 46 | 33598 | #-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import ast
import base64
import hashlib
import hmac
import sys
import types
import warnings
import inspect
if sys.version_info < (3,):
from urllib2 import quote as url_quote
from urllib2 import unquote as url_unquote
_strtype = basestring
else:
from urllib.parse import quote as url_quote
from urllib.parse import unquote as url_unquote
_strtype = str
from datetime import datetime
from xml.dom import minidom
from xml.sax.saxutils import escape as xml_escape
#--------------------------------------------------------------------------
# constants
__author__ = 'Microsoft Corp. <ptvshelp@microsoft.com>'
__version__ = '0.8.4'
# Live ServiceClient URLs
BLOB_SERVICE_HOST_BASE = '.blob.core.windows.net'
QUEUE_SERVICE_HOST_BASE = '.queue.core.windows.net'
TABLE_SERVICE_HOST_BASE = '.table.core.windows.net'
SERVICE_BUS_HOST_BASE = '.servicebus.windows.net'
MANAGEMENT_HOST = 'management.core.windows.net'
# Development ServiceClient URLs
DEV_BLOB_HOST = '127.0.0.1:10000'
DEV_QUEUE_HOST = '127.0.0.1:10001'
DEV_TABLE_HOST = '127.0.0.1:10002'
# Default credentials for Development Storage Service
DEV_ACCOUNT_NAME = 'devstoreaccount1'
DEV_ACCOUNT_KEY = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=='
# All of our error messages
_ERROR_CANNOT_FIND_PARTITION_KEY = 'Cannot find partition key in request.'
_ERROR_CANNOT_FIND_ROW_KEY = 'Cannot find row key in request.'
_ERROR_INCORRECT_TABLE_IN_BATCH = \
'Table should be the same in a batch operations'
_ERROR_INCORRECT_PARTITION_KEY_IN_BATCH = \
'Partition Key should be the same in a batch operations'
_ERROR_DUPLICATE_ROW_KEY_IN_BATCH = \
'Row Keys should not be the same in a batch operations'
_ERROR_BATCH_COMMIT_FAIL = 'Batch Commit Fail'
_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE = \
'Message is not peek locked and cannot be deleted.'
_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK = \
'Message is not peek locked and cannot be unlocked.'
_ERROR_QUEUE_NOT_FOUND = 'Queue was not found'
_ERROR_TOPIC_NOT_FOUND = 'Topic was not found'
_ERROR_CONFLICT = 'Conflict ({0})'
_ERROR_NOT_FOUND = 'Not found ({0})'
_ERROR_UNKNOWN = 'Unknown error ({0})'
_ERROR_SERVICEBUS_MISSING_INFO = \
'You need to provide servicebus namespace, access key and Issuer'
_ERROR_STORAGE_MISSING_INFO = \
'You need to provide both account name and access key'
_ERROR_ACCESS_POLICY = \
'share_access_policy must be either SignedIdentifier or AccessPolicy ' + \
'instance'
_WARNING_VALUE_SHOULD_BE_BYTES = \
'Warning: {0} must be bytes data type. It will be converted ' + \
'automatically, with utf-8 text encoding.'
_ERROR_VALUE_SHOULD_BE_BYTES = '{0} should be of type bytes.'
_ERROR_VALUE_NONE = '{0} should not be None.'
_ERROR_VALUE_NEGATIVE = '{0} should not be negative.'
_ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY = \
'Cannot serialize the specified value ({0}) to an entity. Please use ' + \
'an EntityProperty (which can specify custom types), int, str, bool, ' + \
'or datetime.'
_ERROR_PAGE_BLOB_SIZE_ALIGNMENT = \
'Invalid page blob size: {0}. ' + \
'The size must be aligned to a 512-byte boundary.'
_USER_AGENT_STRING = 'pyazure/' + __version__
METADATA_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices/metadata'
class WindowsAzureData(object):
    ''' Base class for all data classes.
    It is only used for isinstance checks. '''
pass
class WindowsAzureError(Exception):
    ''' WindowsAzure exception base class. '''
def __init__(self, message):
super(WindowsAzureError, self).__init__(message)
class WindowsAzureConflictError(WindowsAzureError):
'''Indicates that the resource could not be created because it already
exists'''
def __init__(self, message):
super(WindowsAzureConflictError, self).__init__(message)
class WindowsAzureMissingResourceError(WindowsAzureError):
'''Indicates that a request for a request for a resource (queue, table,
container, etc...) failed because the specified resource does not exist'''
def __init__(self, message):
super(WindowsAzureMissingResourceError, self).__init__(message)
class WindowsAzureBatchOperationError(WindowsAzureError):
'''Indicates that a batch operation failed'''
def __init__(self, message, code):
super(WindowsAzureBatchOperationError, self).__init__(message)
self.code = code
class Feed(object):
pass
class _Base64String(str):
pass
class HeaderDict(dict):
def __getitem__(self, index):
return super(HeaderDict, self).__getitem__(index.lower())
def _encode_base64(data):
if isinstance(data, _unicode_type):
data = data.encode('utf-8')
encoded = base64.b64encode(data)
return encoded.decode('utf-8')
def _decode_base64_to_bytes(data):
if isinstance(data, _unicode_type):
data = data.encode('utf-8')
return base64.b64decode(data)
def _decode_base64_to_text(data):
decoded_bytes = _decode_base64_to_bytes(data)
return decoded_bytes.decode('utf-8')
def _get_readable_id(id_name, id_prefix_to_skip):
"""simplified an id to be more friendly for us people"""
# id_name is in the form 'https://namespace.host.suffix/name'
# where name may contain a forward slash!
pos = id_name.find('//')
if pos != -1:
pos += 2
if id_prefix_to_skip:
pos = id_name.find(id_prefix_to_skip, pos)
if pos != -1:
pos += len(id_prefix_to_skip)
pos = id_name.find('/', pos)
if pos != -1:
return id_name[pos + 1:]
return id_name
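# For example (hypothetical ids):
#   _get_readable_id('https://ns.host.net/myqueue', None) -> 'myqueue'
#   _get_readable_id('https://ns.host.net/topic/Subscriptions/s1',
#                    'Subscriptions') -> 's1'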
def _get_entry_properties_from_node(entry, include_id, id_prefix_to_skip=None, use_title_as_id=False):
''' get properties from entry xml '''
properties = {}
etag = entry.getAttributeNS(METADATA_NS, 'etag')
if etag:
properties['etag'] = etag
for updated in _get_child_nodes(entry, 'updated'):
properties['updated'] = updated.firstChild.nodeValue
for name in _get_children_from_path(entry, 'author', 'name'):
if name.firstChild is not None:
properties['author'] = name.firstChild.nodeValue
if include_id:
if use_title_as_id:
for title in _get_child_nodes(entry, 'title'):
properties['name'] = title.firstChild.nodeValue
else:
for id in _get_child_nodes(entry, 'id'):
properties['name'] = _get_readable_id(
id.firstChild.nodeValue, id_prefix_to_skip)
return properties
def _get_entry_properties(xmlstr, include_id, id_prefix_to_skip=None):
''' get properties from entry xml '''
xmldoc = minidom.parseString(xmlstr)
properties = {}
for entry in _get_child_nodes(xmldoc, 'entry'):
properties.update(_get_entry_properties_from_node(entry, include_id, id_prefix_to_skip))
return properties
def _get_first_child_node_value(parent_node, node_name):
xml_attrs = _get_child_nodes(parent_node, node_name)
if xml_attrs:
xml_attr = xml_attrs[0]
if xml_attr.firstChild:
value = xml_attr.firstChild.nodeValue
return value
def _get_child_nodes(node, tagName):
return [childNode for childNode in node.getElementsByTagName(tagName)
if childNode.parentNode == node]
def _get_children_from_path(node, *path):
    '''Descends through a hierarchy of nodes, returning the list of children
    at the innermost level. Only returns children that share a common parent,
not cousins.'''
cur = node
for index, child in enumerate(path):
if isinstance(child, _strtype):
next = _get_child_nodes(cur, child)
else:
next = _get_child_nodesNS(cur, *child)
if index == len(path) - 1:
return next
elif not next:
break
cur = next[0]
return []
def _get_child_nodesNS(node, ns, tagName):
return [childNode for childNode in node.getElementsByTagNameNS(ns, tagName)
if childNode.parentNode == node]
def _create_entry(entry_body):
''' Adds common part of entry to a given entry body and return the whole
xml. '''
updated_str = datetime.utcnow().isoformat()
if datetime.utcnow().utcoffset() is None:
updated_str += '+00:00'
entry_start = '''<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom" >
<title /><updated>{updated}</updated><author><name /></author><id />
<content type="application/xml">
{body}</content></entry>'''
return entry_start.format(updated=updated_str, body=entry_body)
def _to_datetime(strtime):
return datetime.strptime(strtime, "%Y-%m-%dT%H:%M:%S.%f")
_KNOWN_SERIALIZATION_XFORMS = {
'include_apis': 'IncludeAPIs',
'message_id': 'MessageId',
'content_md5': 'Content-MD5',
'last_modified': 'Last-Modified',
'cache_control': 'Cache-Control',
'account_admin_live_email_id': 'AccountAdminLiveEmailId',
'service_admin_live_email_id': 'ServiceAdminLiveEmailId',
'subscription_id': 'SubscriptionID',
'fqdn': 'FQDN',
'private_id': 'PrivateID',
'os_virtual_hard_disk': 'OSVirtualHardDisk',
'logical_disk_size_in_gb': 'LogicalDiskSizeInGB',
'logical_size_in_gb': 'LogicalSizeInGB',
'os': 'OS',
'persistent_vm_downtime_info': 'PersistentVMDowntimeInfo',
'copy_id': 'CopyId',
}
def _get_serialization_name(element_name):
"""converts a Python name into a serializable name"""
known = _KNOWN_SERIALIZATION_XFORMS.get(element_name)
if known is not None:
return known
if element_name.startswith('x_ms_'):
return element_name.replace('_', '-')
if element_name.endswith('_id'):
element_name = element_name.replace('_id', 'ID')
for name in ['content_', 'last_modified', 'if_', 'cache_control']:
if element_name.startswith(name):
element_name = element_name.replace('_', '-_')
return ''.join(name.capitalize() for name in element_name.split('_'))
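# For example:
#   _get_serialization_name('message_id')    -> 'MessageId'      (known xform)
#   _get_serialization_name('x_ms_meta_foo') -> 'x-ms-meta-foo'
#   _get_serialization_name('service_name')  -> 'ServiceName'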
if sys.version_info < (3,):
_unicode_type = unicode
def _str(value):
if isinstance(value, unicode):
return value.encode('utf-8')
return str(value)
else:
_str = str
_unicode_type = str
def _str_or_none(value):
if value is None:
return None
return _str(value)
def _int_or_none(value):
if value is None:
return None
return str(int(value))
def _bool_or_none(value):
if value is None:
return None
if isinstance(value, bool):
if value:
return 'true'
else:
return 'false'
return str(value)
def _convert_class_to_xml(source, xml_prefix=True):
if source is None:
return ''
xmlstr = ''
if xml_prefix:
xmlstr = '<?xml version="1.0" encoding="utf-8"?>'
if isinstance(source, list):
for value in source:
xmlstr += _convert_class_to_xml(value, False)
elif isinstance(source, WindowsAzureData):
class_name = source.__class__.__name__
xmlstr += '<' + class_name + '>'
for name, value in vars(source).items():
if value is not None:
if isinstance(value, list) or \
isinstance(value, WindowsAzureData):
xmlstr += _convert_class_to_xml(value, False)
else:
xmlstr += ('<' + _get_serialization_name(name) + '>' +
xml_escape(str(value)) + '</' +
_get_serialization_name(name) + '>')
xmlstr += '</' + class_name + '>'
return xmlstr
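# A hedged sketch of what this produces (the class name becomes the element
# name; `Example` is illustrative only):
#
#     class Example(WindowsAzureData):
#         def __init__(self):
#             self.message_id = '42'
#
#     _convert_class_to_xml(Example(), xml_prefix=False)
#     # -> '<Example><MessageId>42</MessageId></Example>'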
def _find_namespaces_from_child(parent, child, namespaces):
"""Recursively searches from the parent to the child,
gathering all the applicable namespaces along the way"""
for cur_child in parent.childNodes:
if cur_child is child:
return True
if _find_namespaces_from_child(cur_child, child, namespaces):
# we are the parent node
for key in cur_child.attributes.keys():
if key.startswith('xmlns:') or key == 'xmlns':
namespaces[key] = cur_child.attributes[key]
break
return False
def _find_namespaces(parent, child):
res = {}
for key in parent.documentElement.attributes.keys():
if key.startswith('xmlns:') or key == 'xmlns':
res[key] = parent.documentElement.attributes[key]
_find_namespaces_from_child(parent, child, res)
return res
def _clone_node_with_namespaces(node_to_clone, original_doc):
clone = node_to_clone.cloneNode(True)
for key, value in _find_namespaces(original_doc, node_to_clone).items():
clone.attributes[key] = value
return clone
def _convert_response_to_feeds(response, convert_callback):
if response is None:
return None
feeds = _list_of(Feed)
x_ms_continuation = HeaderDict()
for name, value in response.headers:
if 'x-ms-continuation' in name:
x_ms_continuation[name[len('x-ms-continuation') + 1:]] = value
if x_ms_continuation:
setattr(feeds, 'x_ms_continuation', x_ms_continuation)
xmldoc = minidom.parseString(response.body)
xml_entries = _get_children_from_path(xmldoc, 'feed', 'entry')
if not xml_entries:
# in some cases, response contains only entry but no feed
xml_entries = _get_children_from_path(xmldoc, 'entry')
if inspect.isclass(convert_callback) and issubclass(convert_callback, WindowsAzureData):
for xml_entry in xml_entries:
return_obj = convert_callback()
for node in _get_children_from_path(xml_entry,
'content',
convert_callback.__name__):
_fill_data_to_return_object(node, return_obj)
for name, value in _get_entry_properties_from_node(xml_entry,
include_id=True,
use_title_as_id=True).items():
setattr(return_obj, name, value)
feeds.append(return_obj)
else:
for xml_entry in xml_entries:
new_node = _clone_node_with_namespaces(xml_entry, xmldoc)
feeds.append(convert_callback(new_node.toxml('utf-8')))
return feeds
def _validate_type_bytes(param_name, param):
if not isinstance(param, bytes):
raise TypeError(_ERROR_VALUE_SHOULD_BE_BYTES.format(param_name))
def _validate_not_none(param_name, param):
if param is None:
raise TypeError(_ERROR_VALUE_NONE.format(param_name))
def _fill_list_of(xmldoc, element_type, xml_element_name):
xmlelements = _get_child_nodes(xmldoc, xml_element_name)
return [_parse_response_body_from_xml_node(xmlelement, element_type) \
for xmlelement in xmlelements]
def _fill_scalar_list_of(xmldoc, element_type, parent_xml_element_name,
xml_element_name):
'''Converts an xml fragment into a list of scalar types. The parent xml
element contains a flat list of xml elements which are converted into the
specified scalar type and added to the list.
Example:
xmldoc=
<Endpoints>
<Endpoint>http://{storage-service-name}.blob.core.windows.net/</Endpoint>
<Endpoint>http://{storage-service-name}.queue.core.windows.net/</Endpoint>
<Endpoint>http://{storage-service-name}.table.core.windows.net/</Endpoint>
</Endpoints>
element_type=str
parent_xml_element_name='Endpoints'
xml_element_name='Endpoint'
'''
xmlelements = _get_child_nodes(xmldoc, parent_xml_element_name)
if xmlelements:
xmlelements = _get_child_nodes(xmlelements[0], xml_element_name)
return [_get_node_value(xmlelement, element_type) \
for xmlelement in xmlelements]
def _fill_dict(xmldoc, element_name):
xmlelements = _get_child_nodes(xmldoc, element_name)
if xmlelements:
return_obj = {}
for child in xmlelements[0].childNodes:
if child.firstChild:
return_obj[child.nodeName] = child.firstChild.nodeValue
return return_obj
def _fill_dict_of(xmldoc, parent_xml_element_name, pair_xml_element_name,
key_xml_element_name, value_xml_element_name):
'''Converts an xml fragment into a dictionary. The parent xml element
contains a list of xml elements where each element has a child element for
the key, and another for the value.
Example:
xmldoc=
<ExtendedProperties>
<ExtendedProperty>
<Name>Ext1</Name>
<Value>Val1</Value>
</ExtendedProperty>
<ExtendedProperty>
<Name>Ext2</Name>
<Value>Val2</Value>
</ExtendedProperty>
</ExtendedProperties>
element_type=str
parent_xml_element_name='ExtendedProperties'
pair_xml_element_name='ExtendedProperty'
key_xml_element_name='Name'
value_xml_element_name='Value'
'''
return_obj = {}
xmlelements = _get_child_nodes(xmldoc, parent_xml_element_name)
if xmlelements:
xmlelements = _get_child_nodes(xmlelements[0], pair_xml_element_name)
for pair in xmlelements:
keys = _get_child_nodes(pair, key_xml_element_name)
values = _get_child_nodes(pair, value_xml_element_name)
if keys and values:
key = keys[0].firstChild.nodeValue
value = values[0].firstChild.nodeValue
return_obj[key] = value
return return_obj
def _fill_instance_child(xmldoc, element_name, return_type):
'''Converts a child of the current dom element to the specified type.
'''
xmlelements = _get_child_nodes(
xmldoc, _get_serialization_name(element_name))
if not xmlelements:
return None
return_obj = return_type()
_fill_data_to_return_object(xmlelements[0], return_obj)
return return_obj
def _fill_instance_element(element, return_type):
"""Converts a DOM element into the specified object"""
return _parse_response_body_from_xml_node(element, return_type)
def _fill_data_minidom(xmldoc, element_name, data_member):
xmlelements = _get_child_nodes(
xmldoc, _get_serialization_name(element_name))
if not xmlelements or not xmlelements[0].childNodes:
return None
value = xmlelements[0].firstChild.nodeValue
if data_member is None:
return value
elif isinstance(data_member, datetime):
return _to_datetime(value)
elif type(data_member) is bool:
return value.lower() != 'false'
else:
return type(data_member)(value)
def _get_node_value(xmlelement, data_type):
value = xmlelement.firstChild.nodeValue
if data_type is datetime:
return _to_datetime(value)
elif data_type is bool:
return value.lower() != 'false'
else:
return data_type(value)
def _get_request_body_bytes_only(param_name, param_value):
'''Validates the request body passed in and converts it to bytes
if our policy allows it.'''
if param_value is None:
return b''
if isinstance(param_value, bytes):
return param_value
# Previous versions of the SDK allowed data types other than bytes to be
# passed in, and they would be auto-converted to bytes. We preserve this
# behavior when running under 2.7, but issue a warning.
# Python 3 support is new, so we reject anything that's not bytes.
if sys.version_info < (3,):
warnings.warn(_WARNING_VALUE_SHOULD_BE_BYTES.format(param_name))
return _get_request_body(param_value)
raise TypeError(_ERROR_VALUE_SHOULD_BE_BYTES.format(param_name))
def _get_request_body(request_body):
'''Converts an object into a request body. If it's None
we'll return an empty string, if it's one of our objects it'll
convert it to XML and return it. Otherwise we just use the object
directly'''
if request_body is None:
return b''
if isinstance(request_body, WindowsAzureData):
request_body = _convert_class_to_xml(request_body)
if isinstance(request_body, bytes):
return request_body
if isinstance(request_body, _unicode_type):
return request_body.encode('utf-8')
request_body = str(request_body)
if isinstance(request_body, _unicode_type):
return request_body.encode('utf-8')
return request_body
def _parse_enum_results_list(response, return_type, resp_type, item_type):
"""resp_body is the XML we received
resp_type is a string, such as Containers,
return_type is the type we're constructing, such as ContainerEnumResults
item_type is the type object of the item to be created, such as Container
This function then returns a ContainerEnumResults object with the
containers member populated with the results.
"""
# parsing something like:
# <EnumerationResults ... >
# <Queues>
# <Queue>
# <Something />
# <SomethingElse />
# </Queue>
# </Queues>
# </EnumerationResults>
respbody = response.body
return_obj = return_type()
doc = minidom.parseString(respbody)
items = []
for enum_results in _get_child_nodes(doc, 'EnumerationResults'):
# path is something like Queues, Queue
for child in _get_children_from_path(enum_results,
resp_type,
resp_type[:-1]):
items.append(_fill_instance_element(child, item_type))
for name, value in vars(return_obj).items():
# queues, Queues, this is the list its self which we populated
# above
if name == resp_type.lower():
# the list its self.
continue
value = _fill_data_minidom(enum_results, name, value)
if value is not None:
setattr(return_obj, name, value)
setattr(return_obj, resp_type.lower(), items)
return return_obj
def _parse_simple_list(response, type, item_type, list_name):
respbody = response.body
res = type()
res_items = []
doc = minidom.parseString(respbody)
type_name = type.__name__
item_name = item_type.__name__
for item in _get_children_from_path(doc, type_name, item_name):
res_items.append(_fill_instance_element(item, item_type))
setattr(res, list_name, res_items)
return res
def _parse_response(response, return_type):
'''
Parse the HTTPResponse's body and fill all the data into a class of
return_type.
'''
return _parse_response_body_from_xml_text(response.body, return_type)
def _parse_service_resources_response(response, return_type):
'''
Parse the HTTPResponse's body and fill all the data into a class of
return_type.
'''
return _parse_response_body_from_service_resources_xml_text(response.body, return_type)
def _fill_data_to_return_object(node, return_obj):
members = dict(vars(return_obj))
for name, value in members.items():
if isinstance(value, _list_of):
setattr(return_obj,
name,
_fill_list_of(node,
value.list_type,
value.xml_element_name))
elif isinstance(value, _scalar_list_of):
setattr(return_obj,
name,
_fill_scalar_list_of(node,
value.list_type,
_get_serialization_name(name),
value.xml_element_name))
elif isinstance(value, _dict_of):
setattr(return_obj,
name,
_fill_dict_of(node,
_get_serialization_name(name),
value.pair_xml_element_name,
value.key_xml_element_name,
value.value_xml_element_name))
elif isinstance(value, _xml_attribute):
real_value = None
if node.hasAttribute(value.xml_element_name):
real_value = node.getAttribute(value.xml_element_name)
if real_value is not None:
setattr(return_obj, name, real_value)
elif isinstance(value, WindowsAzureData):
setattr(return_obj,
name,
_fill_instance_child(node, name, value.__class__))
elif isinstance(value, dict):
setattr(return_obj,
name,
_fill_dict(node, _get_serialization_name(name)))
elif isinstance(value, _Base64String):
value = _fill_data_minidom(node, name, '')
if value is not None:
value = _decode_base64_to_text(value)
# always set the attribute, so we don't end up returning an object
# with type _Base64String
setattr(return_obj, name, value)
else:
value = _fill_data_minidom(node, name, value)
if value is not None:
setattr(return_obj, name, value)
def _parse_response_body_from_xml_node(node, return_type):
'''
parse the xml and fill all the data into a class of return_type
'''
return_obj = return_type()
_fill_data_to_return_object(node, return_obj)
return return_obj
def _parse_response_body_from_xml_text(respbody, return_type):
'''
parse the xml and fill all the data into a class of return_type
'''
doc = minidom.parseString(respbody)
return_obj = return_type()
xml_name = return_type._xml_name if hasattr(return_type, '_xml_name') else return_type.__name__
for node in _get_child_nodes(doc, xml_name):
_fill_data_to_return_object(node, return_obj)
return return_obj
def _parse_response_body_from_service_resources_xml_text(respbody, return_type):
'''
parse the xml and fill all the data into a class of return_type
'''
doc = minidom.parseString(respbody)
return_obj = _list_of(return_type)
for node in _get_children_from_path(doc, "ServiceResources", "ServiceResource"):
local_obj = return_type()
_fill_data_to_return_object(node, local_obj)
return_obj.append(local_obj)
return return_obj
class _dict_of(dict):
"""a dict which carries with it the xml element names for key,val.
Used for deserializaion and construction of the lists"""
def __init__(self, pair_xml_element_name, key_xml_element_name,
value_xml_element_name):
self.pair_xml_element_name = pair_xml_element_name
self.key_xml_element_name = key_xml_element_name
self.value_xml_element_name = value_xml_element_name
super(_dict_of, self).__init__()
class _list_of(list):
"""a list which carries with it the type that's expected to go in it.
    Used for deserialization and construction of the lists"""
def __init__(self, list_type, xml_element_name=None):
self.list_type = list_type
if xml_element_name is None:
self.xml_element_name = list_type.__name__
else:
self.xml_element_name = xml_element_name
super(_list_of, self).__init__()
class _scalar_list_of(list):
"""a list of scalar types which carries with it the type that's
expected to go in it along with its xml element name.
    Used for deserialization and construction of the lists"""
def __init__(self, list_type, xml_element_name):
self.list_type = list_type
self.xml_element_name = xml_element_name
super(_scalar_list_of, self).__init__()
class _xml_attribute:
"""a accessor to XML attributes
expected to go in it along with its xml element name.
Used for deserialization and construction"""
def __init__(self, xml_element_name):
self.xml_element_name = xml_element_name
def _update_request_uri_query_local_storage(request, use_local_storage):
''' create correct uri and query for the request '''
uri, query = _update_request_uri_query(request)
if use_local_storage:
return '/' + DEV_ACCOUNT_NAME + uri, query
return uri, query
def _update_request_uri_query(request):
'''pulls the query string out of the URI and moves it into
the query portion of the request object. If there are already
query parameters on the request the parameters in the URI will
appear after the existing parameters'''
if '?' in request.path:
request.path, _, query_string = request.path.partition('?')
if query_string:
query_params = query_string.split('&')
for query in query_params:
if '=' in query:
name, _, value = query.partition('=')
request.query.append((name, value))
request.path = url_quote(request.path, '/()$=\',')
# add encoded queries to request.path.
if request.query:
request.path += '?'
for name, value in request.query:
if value is not None:
request.path += name + '=' + url_quote(value, '/()$=\',') + '&'
request.path = request.path[:-1]
return request.path, request.query
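# For example, given a hypothetical request where
#     request.path  = '/container/blob?comp=metadata'
#     request.query = [('timeout', '30')]
# the URI parameters are appended to request.query and the path is rebuilt
# as '/container/blob?timeout=30&comp=metadata'.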
def _dont_fail_on_exist(error):
''' don't throw exception if the resource exists.
This is called by create_* APIs with fail_on_exist=False'''
if isinstance(error, WindowsAzureConflictError):
return False
else:
raise error
def _dont_fail_not_exist(error):
''' don't throw exception if the resource doesn't exist.
    This is called by delete_* APIs with fail_not_exist=False'''
if isinstance(error, WindowsAzureMissingResourceError):
return False
else:
raise error
def _general_error_handler(http_error):
''' Simple error handler for azure.'''
if http_error.status == 409:
raise WindowsAzureConflictError(
_ERROR_CONFLICT.format(str(http_error)))
elif http_error.status == 404:
raise WindowsAzureMissingResourceError(
_ERROR_NOT_FOUND.format(str(http_error)))
else:
if http_error.respbody is not None:
raise WindowsAzureError(
_ERROR_UNKNOWN.format(str(http_error)) + '\n' + \
http_error.respbody.decode('utf-8'))
else:
raise WindowsAzureError(_ERROR_UNKNOWN.format(str(http_error)))
def _parse_response_for_dict(response):
''' Extracts name-values from response header. Filter out the standard
http headers.'''
if response is None:
return None
http_headers = ['server', 'date', 'location', 'host',
'via', 'proxy-connection', 'connection']
return_dict = HeaderDict()
if response.headers:
for name, value in response.headers:
if not name.lower() in http_headers:
return_dict[name] = value
return return_dict
def _parse_response_for_dict_prefix(response, prefixes):
''' Extracts name-values for names starting with prefix from response
header. Filter out the standard http headers.'''
if response is None:
return None
return_dict = {}
orig_dict = _parse_response_for_dict(response)
if orig_dict:
for name, value in orig_dict.items():
for prefix_value in prefixes:
if name.lower().startswith(prefix_value.lower()):
return_dict[name] = value
break
return return_dict
else:
return None
def _parse_response_for_dict_filter(response, filter):
''' Extracts name-values for names in filter from response header. Filter
out the standard http headers.'''
if response is None:
return None
return_dict = {}
orig_dict = _parse_response_for_dict(response)
if orig_dict:
for name, value in orig_dict.items():
if name.lower() in filter:
return_dict[name] = value
return return_dict
else:
return None
def _sign_string(key, string_to_sign, key_is_base64=True):
if key_is_base64:
key = _decode_base64_to_bytes(key)
else:
if isinstance(key, _unicode_type):
key = key.encode('utf-8')
if isinstance(string_to_sign, _unicode_type):
string_to_sign = string_to_sign.encode('utf-8')
signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)
digest = signed_hmac_sha256.digest()
encoded_digest = _encode_base64(digest)
return encoded_digest
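# A minimal sketch (development-storage key, illustrative string to sign):
#
#     signature = _sign_string(DEV_ACCOUNT_KEY,
#                              'GET\n\n\n/devstoreaccount1/container')
#     # -> base64-encoded HMAC-SHA256 digest for the Authorization header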
| apache-2.0 |
koobonil/Boss2D | Boss2D/addon/_old/webrtc-qt5.11.2_for_boss/tools_webrtc/cpu/cpu_mon.py | 6 | 2057 | #!/usr/bin/env python
#
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import psutil
import sys
import numpy
from matplotlib import pyplot
class CpuSnapshot(object):
def __init__(self, label):
self.label = label
self.samples = []
def Capture(self, sample_count):
print ('Capturing %d CPU samples for %s...' %
((sample_count - len(self.samples)), self.label))
while len(self.samples) < sample_count:
self.samples.append(psutil.cpu_percent(1.0, False))
def Text(self):
return ('%s: avg=%s, median=%s, min=%s, max=%s' %
(self.label, numpy.average(self.samples),
numpy.median(self.samples),
numpy.min(self.samples), numpy.max(self.samples)))
def Max(self):
return numpy.max(self.samples)
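# A minimal usage sketch (Capture blocks for roughly one second per sample):
#
#   snap = CpuSnapshot('idle baseline')
#   snap.Capture(3)
#   print snap.Text()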
def GrabCpuSamples(sample_count):
print 'Label for snapshot (enter to quit): '
label = raw_input().strip()
if len(label) == 0:
return None
snapshot = CpuSnapshot(label)
snapshot.Capture(sample_count)
return snapshot
def main():
print 'How many seconds to capture per snapshot (enter for 60)?'
sample_count = raw_input().strip()
if len(sample_count) > 0 and int(sample_count) > 0:
sample_count = int(sample_count)
else:
print 'Defaulting to 60 samples.'
sample_count = 60
snapshots = []
while True:
snapshot = GrabCpuSamples(sample_count)
    if snapshot is None:
break
snapshots.append(snapshot)
if len(snapshots) == 0:
print 'no samples captured'
return -1
pyplot.title('CPU usage')
for s in snapshots:
pyplot.plot(s.samples, label=s.Text(), linewidth=2)
pyplot.legend()
pyplot.show()
return 0
if __name__ == '__main__':
sys.exit(main())
| mit |
spark-test/spark | examples/src/main/python/mllib/kmeans.py | 51 | 1552 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
A K-means clustering program using MLlib.
This example requires NumPy (http://www.numpy.org/).
"""
from __future__ import print_function
import sys
import numpy as np
from pyspark import SparkContext
from pyspark.mllib.clustering import KMeans
def parseVector(line):
return np.array([float(x) for x in line.split(' ')])
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: kmeans <file> <k>", file=sys.stderr)
sys.exit(-1)
sc = SparkContext(appName="KMeans")
lines = sc.textFile(sys.argv[1])
data = lines.map(parseVector)
k = int(sys.argv[2])
model = KMeans.train(data, k)
print("Final centers: " + str(model.clusterCenters))
print("Total Cost: " + str(model.computeCost(data)))
sc.stop()
| apache-2.0 |
PriceChild/ansible | lib/ansible/modules/windows/win_user.py | 56 | 4540 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Matt Martz <matt@sivel.net>, and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: win_user
version_added: "1.7"
short_description: Manages local Windows user accounts
description:
- Manages local Windows user accounts
options:
name:
description:
- Name of the user to create, remove or modify.
required: true
fullname:
description:
- Full name of the user
required: false
default: null
version_added: "1.9"
description:
description:
- Description of the user
required: false
default: null
version_added: "1.9"
password:
description:
- Optionally set the user's password to this (plain text) value.
required: false
default: null
update_password:
description:
- C(always) will update passwords if they differ. C(on_create) will
only set the password for newly created users.
required: false
choices: [ 'always', 'on_create' ]
default: always
version_added: "1.9"
password_expired:
description:
- C(yes) will require the user to change their password at next login.
C(no) will clear the expired password flag.
required: false
choices: [ 'yes', 'no' ]
default: null
version_added: "1.9"
password_never_expires:
description:
- C(yes) will set the password to never expire. C(no) will allow the
password to expire.
required: false
choices: [ 'yes', 'no' ]
default: null
version_added: "1.9"
user_cannot_change_password:
description:
- C(yes) will prevent the user from changing their password. C(no) will
allow the user to change their password.
required: false
choices: [ 'yes', 'no' ]
default: null
version_added: "1.9"
account_disabled:
description:
- C(yes) will disable the user account. C(no) will clear the disabled
flag.
required: false
choices: [ 'yes', 'no' ]
default: null
version_added: "1.9"
account_locked:
description:
- C(no) will unlock the user account if locked.
required: false
choices: [ 'no' ]
default: null
version_added: "1.9"
groups:
description:
      - Adds or removes the user from this comma-separated list of groups,
depending on the value of I(groups_action). When I(groups_action) is
C(replace) and I(groups) is set to the empty string ('groups='), the
user is removed from all groups.
required: false
version_added: "1.9"
groups_action:
description:
- If C(replace), the user is added as a member of each group in
I(groups) and removed from any other groups. If C(add), the user is
added to each group in I(groups) where not already a member. If
C(remove), the user is removed from each group in I(groups).
required: false
choices: [ "replace", "add", "remove" ]
default: "replace"
version_added: "1.9"
state:
description:
- When C(present), creates or updates the user account. When C(absent),
removes the user account if it exists. When C(query) (new in 1.9),
retrieves the user account details without making any changes.
required: false
choices:
- present
- absent
- query
default: present
aliases: []
author:
- "Paul Durivage (@angstwad)"
- "Chris Church (@cchurch)"
'''
EXAMPLES = r'''
- name: Ensure user bob is present
win_user:
name: bob
password: B0bP4ssw0rd
state: present
groups:
- Users
- name: Ensure user bob is absent
win_user:
name: bob
state: absent
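- name: Query details for user bob (state documented above; makes no changes)
  win_user:
    name: bob
    state: query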
'''
| gpl-3.0 |
frdb194/django | tests/managers_regress/models.py | 245 | 3566 | """
Various edge-cases for model managers.
"""
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import force_text, python_2_unicode_compatible
class OnlyFred(models.Manager):
def get_queryset(self):
return super(OnlyFred, self).get_queryset().filter(name='fred')
class OnlyBarney(models.Manager):
def get_queryset(self):
return super(OnlyBarney, self).get_queryset().filter(name='barney')
class Value42(models.Manager):
def get_queryset(self):
return super(Value42, self).get_queryset().filter(value=42)
class AbstractBase1(models.Model):
name = models.CharField(max_length=50)
class Meta:
abstract = True
# Custom managers
manager1 = OnlyFred()
manager2 = OnlyBarney()
objects = models.Manager()
class AbstractBase2(models.Model):
value = models.IntegerField()
class Meta:
abstract = True
# Custom manager
restricted = Value42()
# No custom manager on this class to make sure the default case doesn't break.
class AbstractBase3(models.Model):
comment = models.CharField(max_length=50)
class Meta:
abstract = True
@python_2_unicode_compatible
class Parent(models.Model):
name = models.CharField(max_length=50)
manager = OnlyFred()
def __str__(self):
return self.name
# Managers from base classes are inherited and, if no manager is specified
# *and* the parent has a manager specified, the first one (in the MRO) will
# become the default.
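# (Illustrative note, not part of the fixtures: for Child1 below,
# AbstractBase1.manager1 -- the OnlyFred manager -- comes first in the MRO,
# so Child1._default_manager.all() would only return objects named 'fred'.)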
@python_2_unicode_compatible
class Child1(AbstractBase1):
data = models.CharField(max_length=25)
def __str__(self):
return self.data
@python_2_unicode_compatible
class Child2(AbstractBase1, AbstractBase2):
data = models.CharField(max_length=25)
def __str__(self):
return self.data
@python_2_unicode_compatible
class Child3(AbstractBase1, AbstractBase3):
data = models.CharField(max_length=25)
def __str__(self):
return self.data
@python_2_unicode_compatible
class Child4(AbstractBase1):
data = models.CharField(max_length=25)
# Should be the default manager, although the parent managers are
# inherited.
default = models.Manager()
def __str__(self):
return self.data
@python_2_unicode_compatible
class Child5(AbstractBase3):
name = models.CharField(max_length=25)
default = OnlyFred()
objects = models.Manager()
def __str__(self):
return self.name
# Will inherit managers from AbstractBase1, but not Child4.
class Child6(Child4):
value = models.IntegerField()
# Will not inherit default manager from parent.
class Child7(Parent):
pass
# RelatedManagers
@python_2_unicode_compatible
class RelatedModel(models.Model):
test_gfk = GenericRelation('RelationModel', content_type_field='gfk_ctype', object_id_field='gfk_id')
exact = models.NullBooleanField()
def __str__(self):
return force_text(self.pk)
@python_2_unicode_compatible
class RelationModel(models.Model):
fk = models.ForeignKey(RelatedModel, models.CASCADE, related_name='test_fk')
m2m = models.ManyToManyField(RelatedModel, related_name='test_m2m')
gfk_ctype = models.ForeignKey(ContentType, models.SET_NULL, null=True)
gfk_id = models.IntegerField(null=True)
gfk = GenericForeignKey(ct_field='gfk_ctype', fk_field='gfk_id')
def __str__(self):
return force_text(self.pk)
| bsd-3-clause |
hkupty/python-mode | pymode/libs2/rope/contrib/finderrors.py | 93 | 2948 | """Finding bad name and attribute accesses
`find_errors` function can be used to find possible bad name and
attribute accesses. As an example::
errors = find_errors(project, project.get_resource('mod.py'))
for error in errors:
print '%s: %s' % (error.lineno, error.error)
prints possible errors for ``mod.py`` file.
TODO:
* use task handles
* reporting names at most once
* attributes of extension modules that don't appear in
extension_modules project config can be ignored
* not calling `PyScope.get_inner_scope_for_line()` if it is a
bottleneck; needs profiling
* not reporting occurrences where rope cannot infer the object
* rope saves multiple objects for some of the names in its objectdb
use all of them not to give false positives
* ... ;-)
"""
from rope.base import ast, evaluate, pyobjects
def find_errors(project, resource):
"""Find possible bad name and attribute accesses
It returns a list of `Error`\s.
"""
pymodule = project.pycore.resource_to_pyobject(resource)
finder = _BadAccessFinder(pymodule)
ast.walk(pymodule.get_ast(), finder)
return finder.errors
class _BadAccessFinder(object):
def __init__(self, pymodule):
self.pymodule = pymodule
self.scope = pymodule.get_scope()
self.errors = []
def _Name(self, node):
if isinstance(node.ctx, (ast.Store, ast.Param)):
return
scope = self.scope.get_inner_scope_for_line(node.lineno)
pyname = scope.lookup(node.id)
if pyname is None:
self._add_error(node, 'Unresolved variable')
elif self._is_defined_after(scope, pyname, node.lineno):
self._add_error(node, 'Defined later')
def _Attribute(self, node):
if not isinstance(node.ctx, ast.Store):
scope = self.scope.get_inner_scope_for_line(node.lineno)
pyname = evaluate.eval_node(scope, node.value)
if pyname is not None and \
pyname.get_object() != pyobjects.get_unknown():
if node.attr not in pyname.get_object():
self._add_error(node, 'Unresolved attribute')
ast.walk(node.value, self)
def _add_error(self, node, msg):
if isinstance(node, ast.Attribute):
name = node.attr
else:
name = node.id
if name != 'None':
error = Error(node.lineno, msg + ' ' + name)
self.errors.append(error)
def _is_defined_after(self, scope, pyname, lineno):
location = pyname.get_definition_location()
if location is not None and location[1] is not None:
if location[0] == self.pymodule and \
lineno <= location[1] <= scope.get_end():
return True
class Error(object):
def __init__(self, lineno, error):
self.lineno = lineno
self.error = error
def __str__(self):
return '%s: %s' % (self.lineno, self.error)
| lgpl-3.0 |
c-goosen/ctpug_11_july | flask/__init__.py | 1 | 4174 | from flask import Flask
from flask_bootstrap import Bootstrap
from flask import render_template
import bootstrap
import xmlrpclib
from io import BytesIO
import base64
app = Flask(__name__)
username = 'username' #the user
pwd = 'password' #the password of the user
dbname = 'ctpug' #the database
sock_common = xmlrpclib.ServerProxy ('http://127.0.0.1:8069/xmlrpc/common')
uid = sock_common.login(dbname, username, pwd)
#replace localhost with the address of the server
sock = xmlrpclib.ServerProxy('http://127.0.0.1:8069/xmlrpc/object')
def test_connection(username,pwd,dbname):
connection_reply = 'Connection to Odoo - '
args = [] #query clause
ids = sock.execute(dbname, uid, pwd, 'res.partner', 'search', args)
fields = ['name', 'id', 'email'] #fields to read
data = sock.execute(dbname, uid, pwd, 'res.partner', 'read', ids, fields)
    if data[0]['name'] == 'admin':
        connection_reply += 'successful'
    else:
        connection_reply += 'not successful'
return connection_reply
def get_products(username,pwd,dbname):
args = [] #query clause
ids = sock.execute(dbname, uid, pwd, 'product.product', 'search', args)
fields = ['id', 'lst_price', 'qty_available', 'product_tmpl_id'] #fields to read
data = sock.execute(dbname, uid, pwd, 'product.product', 'read', ids, fields)
return data
def get_product_templates(username,pwd,dbname, args):
args = args or [] #query clause
ids = sock.execute(dbname, uid, pwd, 'product.template', 'search', args)
fields = ['id', 'name', 'image_medium'] #fields to read
data = sock.execute(dbname, uid, pwd, 'product.template', 'read', ids, fields)
return data
def get_company_currency(username,pwd,dbname):
args = []
ids = sock.execute(dbname, uid, pwd, 'res.company', 'search', [('id','=',1)])
fields = ['currency_id'] #fields to read
company = sock.execute(dbname, uid, pwd, 'res.company', 'read', ids, fields)
ids = sock.execute(dbname, uid, pwd, 'res.currency', 'search', [('id','=',company[0]['currency_id'][0])])
fields = ['symbol']
currency_symbol = sock.execute(dbname, uid, pwd, 'res.currency', 'read', ids, fields)
return currency_symbol[0]['symbol']
@app.route('/products')
def products():
    product_output = 'List of products <br/><br/>'
product_product = get_products(username,pwd,dbname)
#product_template = get_product_templates(username,pwd,dbname)
count = 0
for x in product_product:
args = [('id', '=', x['product_tmpl_id'][0])]
product_template = get_product_templates(username,pwd,dbname,args)
#product_output = product_output + product_template[0]['name']
#product_output = ''+x['product_tmpl_id']
#for y in product_template:
#if x['product_tmpl_id'] == y['id']:
#product_output = '\n |' + product_output + str(x['id']) + y['name'] + "<img style='display:block; width:100px;height:100px;' id='base64image' src='data:image/jpeg;base64, %s'/>" % y['image_medium'] +' | \n'
if product_template[0]['image_medium']:
product_output += '\n' + str(product_product[count]['id']) +' ' + product_template[0]['name'] + ' ' + get_company_currency(username,pwd,dbname) + str(product_product[count]['lst_price']) + "<img style='display:block; width:100px;height:100px;' id='base64image' src='data:image/jpeg;base64, %s'/>" % product_template[0]['image_medium'] +' \n'
count += 1
return product_output
#return 'List of products %s' % data[0]['id']
@app.route('/')
def index():
connection_reply = 'Connection to Odoo - '
args = [] #query clauses
ids = sock.execute(dbname, uid, pwd, 'res.partner', 'search', args)
fields = ['name', 'id', 'email'] #fields to read
data = sock.execute(dbname, uid, pwd, 'res.partner', 'read', ids, fields)
#return 'Hello %s' %data[0]
if data[0]['id'] == 3:
connection_reply = '%s successful' % connection_reply
else:
connection_reply = '%s not successful' % connection_reply
return connection_reply
#return render_template('index.html', title='Home', connection_reply=connection_reply)
if __name__ == '__main__':
app.run(debug=True)
Bootstrap(app)
#return app
#self.settings()
#__main__.initiate_connection(username,pwd,dbname)
#__main__.test_connection(username,pwd,dbname)
| cc0-1.0 |
ahojjati/letsencrypt | letsencrypt/account.py | 10 | 7268 | """Creates ACME accounts for server."""
import datetime
import hashlib
import logging
import os
import socket
from cryptography.hazmat.primitives import serialization
import pyrfc3339
import pytz
import zope.component
from acme import fields as acme_fields
from acme import jose
from acme import messages
from letsencrypt import errors
from letsencrypt import interfaces
from letsencrypt import le_util
logger = logging.getLogger(__name__)
class Account(object): # pylint: disable=too-few-public-methods
"""ACME protocol registration.
:ivar .RegistrationResource regr: Registration Resource
:ivar .JWK key: Authorized Account Key
:ivar .Meta: Account metadata
:ivar str id: Globally unique account identifier.
"""
class Meta(jose.JSONObjectWithFields):
"""Account metadata
:ivar datetime.datetime creation_dt: Creation date and time (UTC).
:ivar str creation_host: FQDN of host, where account has been created.
.. note:: ``creation_dt`` and ``creation_host`` are useful in
cross-machine migration scenarios.
"""
creation_dt = acme_fields.RFC3339Field("creation_dt")
creation_host = jose.Field("creation_host")
def __init__(self, regr, key, meta=None):
self.key = key
self.regr = regr
self.meta = self.Meta(
# pyrfc3339 drops microseconds, make sure __eq__ is sane
creation_dt=datetime.datetime.now(
tz=pytz.UTC).replace(microsecond=0),
creation_host=socket.getfqdn()) if meta is None else meta
self.id = hashlib.md5( # pylint: disable=invalid-name
self.key.key.public_key().public_bytes(
encoding=serialization.Encoding.DER,
format=serialization.PublicFormat.SubjectPublicKeyInfo)
).hexdigest()
# Implementation note: Email? Multiple accounts can have the
# same email address. Registration URI? Assigned by the
        # server, not guaranteed to be stable over time, nor can a
        # canonical URI be generated. ACME protocol doesn't allow
# account key (and thus its fingerprint) to be updated...
@property
def slug(self):
"""Short account identification string, useful for UI."""
return "{1}@{0} ({2})".format(pyrfc3339.generate(
self.meta.creation_dt), self.meta.creation_host, self.id[:4])
def __repr__(self):
return "<{0}({1})>".format(self.__class__.__name__, self.id)
def __eq__(self, other):
return (isinstance(other, self.__class__) and
self.key == other.key and self.regr == other.regr and
self.meta == other.meta)
def report_new_account(acc, config):
"""Informs the user about their new Let's Encrypt account."""
reporter = zope.component.queryUtility(interfaces.IReporter)
if reporter is None:
return
reporter.add_message(
"Your account credentials have been saved in your Let's Encrypt "
"configuration directory at {0}. You should make a secure backup "
"of this folder now. This configuration directory will also "
"contain certificates and private keys obtained by Let's Encrypt "
"so making regular backups of this folder is ideal.".format(
config.config_dir),
reporter.MEDIUM_PRIORITY, True)
if acc.regr.body.emails:
recovery_msg = ("If you lose your account credentials, you can "
"recover through e-mails sent to {0}.".format(
", ".join(acc.regr.body.emails)))
reporter.add_message(recovery_msg, reporter.HIGH_PRIORITY, True)
class AccountMemoryStorage(interfaces.AccountStorage):
"""In-memory account strage."""
def __init__(self, initial_accounts=None):
self.accounts = initial_accounts if initial_accounts is not None else {}
def find_all(self):
return self.accounts.values()
def save(self, account):
if account.id in self.accounts:
logger.debug("Overwriting account: %s", account.id)
self.accounts[account.id] = account
def load(self, account_id):
try:
return self.accounts[account_id]
except KeyError:
raise errors.AccountNotFound(account_id)
class AccountFileStorage(interfaces.AccountStorage):
"""Accounts file storage.
:ivar .IConfig config: Client configuration
"""
def __init__(self, config):
le_util.make_or_verify_dir(config.accounts_dir, 0o700, os.geteuid())
self.config = config
def _account_dir_path(self, account_id):
return os.path.join(self.config.accounts_dir, account_id)
@classmethod
def _regr_path(cls, account_dir_path):
return os.path.join(account_dir_path, "regr.json")
@classmethod
def _key_path(cls, account_dir_path):
return os.path.join(account_dir_path, "private_key.json")
@classmethod
def _metadata_path(cls, account_dir_path):
return os.path.join(account_dir_path, "meta.json")
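    # Resulting on-disk layout (a sketch derived from the path helpers above):
    #   <accounts_dir>/<account_id>/regr.json
    #   <accounts_dir>/<account_id>/private_key.json
    #   <accounts_dir>/<account_id>/meta.json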
def find_all(self):
try:
candidates = os.listdir(self.config.accounts_dir)
except OSError:
return []
accounts = []
for account_id in candidates:
try:
accounts.append(self.load(account_id))
except errors.AccountStorageError:
logger.debug("Account loading problem", exc_info=True)
return accounts
def load(self, account_id):
account_dir_path = self._account_dir_path(account_id)
if not os.path.isdir(account_dir_path):
raise errors.AccountNotFound(
"Account at %s does not exist" % account_dir_path)
try:
with open(self._regr_path(account_dir_path)) as regr_file:
regr = messages.RegistrationResource.json_loads(regr_file.read())
with open(self._key_path(account_dir_path)) as key_file:
key = jose.JWK.json_loads(key_file.read())
with open(self._metadata_path(account_dir_path)) as metadata_file:
meta = Account.Meta.json_loads(metadata_file.read())
except IOError as error:
raise errors.AccountStorageError(error)
acc = Account(regr, key, meta)
if acc.id != account_id:
raise errors.AccountStorageError(
"Account ids mismatch (expected: {0}, found: {1}".format(
account_id, acc.id))
return acc
def save(self, account):
account_dir_path = self._account_dir_path(account.id)
le_util.make_or_verify_dir(account_dir_path, 0o700, os.geteuid())
try:
with open(self._regr_path(account_dir_path), "w") as regr_file:
regr_file.write(account.regr.json_dumps())
with le_util.safe_open(self._key_path(account_dir_path),
"w", chmod=0o400) as key_file:
key_file.write(account.key.json_dumps())
with open(self._metadata_path(account_dir_path), "w") as metadata_file:
metadata_file.write(account.meta.json_dumps())
except IOError as error:
raise errors.AccountStorageError(error)
| apache-2.0 |
juharris/tensorflow | tensorflow/contrib/tensor_forest/python/ops/inference_ops.py | 15 | 2143 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for BrainTree v2 tree evaluation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import load_library
from tensorflow.python.framework import ops
from tensorflow.python.platform import resource_loader
from tensorflow.python.platform import tf_logging as logging
INFERENCE_OPS_FILE = '_inference_ops.so'
_inference_ops = None
_ops_lock = threading.Lock()
# TODO(b/31222613): This op may be differentiable, and there may be
# latent bugs here.
ops.NotDifferentiable('TreePredictions')
ops.RegisterShape('TreePredictions')(common_shapes.call_cpp_shape_fn)
# Workaround for the fact that importing tensorflow imports contrib
# (even if a user isn't using this or any other contrib op), but
# there's not yet any guarantee that the shared object exists.
# In which case, "import tensorflow" will always crash, even for users that
# never use contrib.
def Load():
"""Load the inference ops library and return the loaded module."""
with _ops_lock:
global _inference_ops
if not _inference_ops:
ops_path = resource_loader.get_path_to_datafile(INFERENCE_OPS_FILE)
logging.info('data path: %s', ops_path)
_inference_ops = load_library.load_op_library(ops_path)
assert _inference_ops, 'Could not load inference_ops.so'
return _inference_ops
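# A minimal usage sketch (assuming _inference_ops.so was built and sits at
# the resource path resolved above):
#
#   inference_ops = Load()
#   # the returned module exposes the custom ops compiled into the shared
#   # object, e.g. the 'TreePredictions' op referenced above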
| apache-2.0 |
infrared5/massroute-pi | features/steps/bus_approaching_blink_steps.py | 1 | 1614 | from time import sleep
from app.component.modifier.blinker import Blinker
PINS = [0,1,2,3]
DELAY = 0.2
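# These step definitions would match feature lines such as the following
# (an illustrative Gherkin sketch, not quoted from the repository):
#   Given A new Blinker instance provided with a Shifter reference
#   When Blinker:start() invoked
#   Then Shifter:set_pins() invoked with 1 once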
@given('A new Blinker instance provided with a Shifter reference')
def blinker_setup_with_shifter(context):
shifter = context.shifter
blinker = Blinker(shifter)
blinker.set_pins(PINS)
shifter.set_pins = MagicMock(return_value=None)
context.blinker = blinker
@when('Blinker:start() invoked')
def blinker_start(context):
context.blinker.start()
@then('Shifter:set_pins() invoked with 1 once')
def shifter_set_pins_on_once(context):
context.shifter.set_pins.assert_called_once_with(PINS, 1)
@given('A new Blinker instance with 0.2 second delay')
def blinker_setup_with_delay(context):
shifter = context.shifter
blinker = Blinker(shifter, DELAY)
blinker.set_pins(PINS)
shifter.set_pins = MagicMock(return_value=None)
context.blinker = blinker
@when('At least 0.2 seconds have lapsed')
def time_elapsed_two_milliseconds(context):
sleep(0.22)
@when('At least 0.4 seconds have lapsed')
def time_elapsed_four_milliseconds(context):
sleep(0.42)
@when('Blinker:stop() invoked')
def blinker_stop(context):
context.blinker.stop()
@then('Shifter:set_pins() invoked with 0')
def shifter_set_pins_off(context):
context.shifter.set_pins.assert_called_with(PINS, 0)
@then('Shifter:set_pins() invoked with 1 twice')
def shifter_set_pins_on_twice(context):
# once for off, twice for on
assert context.shifter.set_pins.call_count == 3
@then('Shifter:set_pins() not called more than once')
def shifter_set_pins_called_once(context):
    assert context.shifter.set_pins.call_count == 1 | mit |
gregdek/ansible | lib/ansible/modules/network/cloudengine/ce_snmp_target_host.py | 25 | 32025 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_snmp_target_host
version_added: "2.4"
short_description: Manages SNMP target host configuration on HUAWEI CloudEngine switches.
description:
- Manages SNMP target host configurations on HUAWEI CloudEngine switches.
author:
- wangdezhuang (@QijunPan)
options:
version:
description:
- Version(s) Supported by SNMP Engine.
choices: ['none', 'v1', 'v2c', 'v3', 'v1v2c', 'v1v3', 'v2cv3', 'all']
connect_port:
description:
- Udp port used by SNMP agent to connect the Network management.
host_name:
description:
- Unique name to identify target host entry.
address:
description:
- Network Address.
notify_type:
description:
- To configure notify type as trap or inform.
choices: ['trap','inform']
vpn_name:
description:
- VPN instance Name.
recv_port:
description:
- UDP Port number used by network management to receive alarm messages.
security_model:
description:
- Security Model.
choices: ['v1','v2c', 'v3']
security_name:
description:
- Security Name.
security_name_v3:
description:
- Security Name V3.
security_level:
description:
- Security level indicating whether to use authentication and encryption.
choices: ['noAuthNoPriv','authentication', 'privacy']
is_public_net:
description:
- To enable or disable Public Net-manager for target Host.
default: no_use
choices: ['no_use','true','false']
interface_name:
description:
- Name of the interface to send the trap message.
'''
EXAMPLES = '''
- name: CloudEngine snmp target host test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Config SNMP version"
ce_snmp_target_host:
state: present
version: v2cv3
provider: "{{ cli }}"
- name: "Config SNMP target host"
ce_snmp_target_host:
state: present
host_name: test1
address: 1.1.1.1
notify_type: trap
vpn_name: js
security_model: v2c
security_name: wdz
provider: "{{ cli }}"
'''
RETURN = '''
changed:
description: check to see if a change was made on the device
returned: always
type: bool
sample: true
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"address": "10.135.182.158", "host_name": "test2",
"notify_type": "trap", "security_level": "authentication",
"security_model": "v3", "security_name_v3": "wdz",
"state": "present", "vpn_name": "js"}
existing:
description: k/v pairs of existing aaa server
returned: always
type: dict
sample: {}
end_state:
description: k/v pairs of aaa params after module execution
returned: always
type: dict
sample: {"target host info": [{"address": "10.135.182.158", "domain": "snmpUDPDomain",
"nmsName": "test2", "notifyType": "trap",
"securityLevel": "authentication", "securityModel": "v3",
"securityNameV3": "wdz", "vpnInstanceName": "js"}]}
updates:
description: command sent to the device
returned: always
type: list
sample: ["snmp-agent target-host host-name test2 trap address udp-domain 10.135.182.158 vpn-instance js params securityname wdz v3 authentication"]
'''
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config, \
ce_argument_spec, get_config, load_config, check_ip_addr
# get snmp version
CE_GET_SNMP_VERSION = """
<filter type="subtree">
<snmp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<engine>
<version></version>
</engine>
</snmp>
</filter>
"""
# merge snmp version
CE_MERGE_SNMP_VERSION = """
<config>
<snmp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<engine operation="merge">
<version>%s</version>
</engine>
</snmp>
</config>
"""
# get snmp target host
CE_GET_SNMP_TARGET_HOST_HEADER = """
<filter type="subtree">
<snmp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<targetHosts>
<targetHost>
<nmsName></nmsName>
"""
CE_GET_SNMP_TARGET_HOST_TAIL = """
</targetHost>
</targetHosts>
</snmp>
</filter>
"""
# merge snmp target host
CE_MERGE_SNMP_TARGET_HOST_HEADER = """
<config>
<snmp xmlns="http://www.huawei.com/netconf/vrp" format-version="1.0" content-version="1.0">
<targetHosts>
<targetHost operation="merge">
<nmsName>%s</nmsName>
"""
CE_MERGE_SNMP_TARGET_HOST_TAIL = """
</targetHost>
</targetHosts>
</snmp>
</config>
"""
# create snmp target host
CE_CREATE_SNMP_TARGET_HOST_HEADER = """
<config>
<snmp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<targetHosts>
<targetHost operation="create">
<nmsName>%s</nmsName>
"""
CE_CREATE_SNMP_TARGET_HOST_TAIL = """
</targetHost>
</targetHosts>
</snmp>
</config>
"""
# delete snmp target host
CE_DELETE_SNMP_TARGET_HOST_HEADER = """
<config>
<snmp xmlns="http://www.huawei.com/netconf/vrp" format-version="1.0" content-version="1.0">
<targetHosts>
<targetHost operation="delete">
<nmsName>%s</nmsName>
"""
CE_DELETE_SNMP_TARGET_HOST_TAIL = """
</targetHost>
</targetHosts>
</snmp>
</config>
"""
INTERFACE_TYPE = ['ethernet', 'eth-trunk', 'tunnel', 'null', 'loopback',
'vlanif', '100ge', '40ge', 'mtunnel', '10ge', 'ge', 'meth', 'vbdif', 'nve']
class SnmpTargetHost(object):
""" Manages SNMP target host configuration """
def __init__(self, **kwargs):
""" Class init """
# module
argument_spec = kwargs["argument_spec"]
self.spec = argument_spec
        required_together = [("address", "notify_type")]
required_if = [
["security_model", "v1", ["security_name"]],
["security_model", "v2c", ["security_name"]],
["security_model", "v3", ["security_name_v3"]]
]
self.module = AnsibleModule(
argument_spec=argument_spec,
required_together=required_together,
required_if=required_if,
supports_check_mode=True
)
# module args
self.state = self.module.params['state']
self.version = self.module.params['version']
self.connect_port = self.module.params['connect_port']
self.host_name = self.module.params['host_name']
self.domain = "snmpUDPDomain"
self.address = self.module.params['address']
self.notify_type = self.module.params['notify_type']
self.vpn_name = self.module.params['vpn_name']
self.recv_port = self.module.params['recv_port']
self.security_model = self.module.params['security_model']
self.security_name = self.module.params['security_name']
self.security_name_v3 = self.module.params['security_name_v3']
self.security_level = self.module.params['security_level']
self.is_public_net = self.module.params['is_public_net']
self.interface_name = self.module.params['interface_name']
# config
self.cur_cli_cfg = dict()
self.cur_netconf_cfg = dict()
self.end_netconf_cfg = dict()
# state
self.changed = False
self.updates_cmd = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
def netconf_get_config(self, conf_str):
""" Get configure by netconf """
xml_str = get_nc_config(self.module, conf_str)
return xml_str
def netconf_set_config(self, conf_str):
""" Set configure by netconf """
xml_str = set_nc_config(self.module, conf_str)
return xml_str
def check_cli_args(self):
""" Check invalid cli args """
if self.connect_port:
if int(self.connect_port) != 161 and (int(self.connect_port) > 65535 or int(self.connect_port) < 1025):
self.module.fail_json(
msg='Error: The value of connect_port %s is out of [161, 1025 - 65535].' % self.connect_port)
def check_netconf_args(self, result):
""" Check invalid netconf args """
need_cfg = True
same_flag = True
delete_flag = False
result["target_host_info"] = []
if self.host_name:
if len(self.host_name) > 32 or len(self.host_name) < 1:
self.module.fail_json(
msg='Error: The len of host_name is out of [1 - 32].')
if self.vpn_name and self.is_public_net != 'no_use':
if self.is_public_net == "true":
self.module.fail_json(
msg='Error: Do not support vpn_name and is_public_net at the same time.')
conf_str = CE_GET_SNMP_TARGET_HOST_HEADER
if self.domain:
conf_str += "<domain></domain>"
if self.address:
if not check_ip_addr(ipaddr=self.address):
self.module.fail_json(
msg='Error: The host address [%s] is invalid.' % self.address)
conf_str += "<address></address>"
if self.notify_type:
conf_str += "<notifyType></notifyType>"
if self.vpn_name:
if len(self.vpn_name) > 31 or len(self.vpn_name) < 1:
self.module.fail_json(
msg='Error: The len of vpn_name is out of [1 - 31].')
conf_str += "<vpnInstanceName></vpnInstanceName>"
if self.recv_port:
if int(self.recv_port) > 65535 or int(self.recv_port) < 0:
self.module.fail_json(
msg='Error: The value of recv_port is out of [0 - 65535].')
conf_str += "<portNumber></portNumber>"
if self.security_model:
conf_str += "<securityModel></securityModel>"
if self.security_name:
if len(self.security_name) > 32 or len(self.security_name) < 1:
self.module.fail_json(
msg='Error: The len of security_name is out of [1 - 32].')
conf_str += "<securityName></securityName>"
if self.security_name_v3:
if len(self.security_name_v3) > 32 or len(self.security_name_v3) < 1:
self.module.fail_json(
msg='Error: The len of security_name_v3 is out of [1 - 32].')
conf_str += "<securityNameV3></securityNameV3>"
if self.security_level:
conf_str += "<securityLevel></securityLevel>"
if self.is_public_net != 'no_use':
conf_str += "<isPublicNet></isPublicNet>"
if self.interface_name:
if len(self.interface_name) > 63 or len(self.interface_name) < 1:
self.module.fail_json(
msg='Error: The len of interface_name is out of [1 - 63].')
find_flag = False
for item in INTERFACE_TYPE:
if item in self.interface_name:
find_flag = True
break
if not find_flag:
self.module.fail_json(
msg='Error: Please input full name of interface_name.')
conf_str += "<interface-name></interface-name>"
conf_str += CE_GET_SNMP_TARGET_HOST_TAIL
recv_xml = self.netconf_get_config(conf_str=conf_str)
if "<data/>" in recv_xml:
if self.state == "present":
same_flag = False
else:
delete_flag = False
else:
xml_str = recv_xml.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
target_host_info = root.findall(
"data/snmp/targetHosts/targetHost")
if target_host_info:
for tmp in target_host_info:
tmp_dict = dict()
for site in tmp:
if site.tag in ["nmsName", "domain", "address", "notifyType", "vpnInstanceName",
"portNumber", "securityModel", "securityName", "securityNameV3",
"securityLevel", "isPublicNet", "interface-name"]:
tmp_dict[site.tag] = site.text
result["target_host_info"].append(tmp_dict)
if result["target_host_info"]:
for tmp in result["target_host_info"]:
same_flag = True
if "nmsName" in tmp.keys():
if tmp["nmsName"] != self.host_name:
same_flag = False
else:
delete_flag = True
if "domain" in tmp.keys():
if tmp["domain"] != self.domain:
same_flag = False
if "address" in tmp.keys():
if tmp["address"] != self.address:
same_flag = False
if "notifyType" in tmp.keys():
if tmp["notifyType"] != self.notify_type:
same_flag = False
if "vpnInstanceName" in tmp.keys():
if tmp["vpnInstanceName"] != self.vpn_name:
same_flag = False
if "portNumber" in tmp.keys():
if tmp["portNumber"] != self.recv_port:
same_flag = False
if "securityModel" in tmp.keys():
if tmp["securityModel"] != self.security_model:
same_flag = False
if "securityName" in tmp.keys():
if tmp["securityName"] != self.security_name:
same_flag = False
if "securityNameV3" in tmp.keys():
if tmp["securityNameV3"] != self.security_name_v3:
same_flag = False
if "securityLevel" in tmp.keys():
if tmp["securityLevel"] != self.security_level:
same_flag = False
if "isPublicNet" in tmp.keys():
if tmp["isPublicNet"] != self.is_public_net:
same_flag = False
if "interface-name" in tmp.keys():
if tmp["interface-name"] != self.interface_name:
same_flag = False
if same_flag:
break
if self.state == "present":
need_cfg = True
if same_flag:
need_cfg = False
else:
need_cfg = False
if delete_flag:
need_cfg = True
result["need_cfg"] = need_cfg
def cli_load_config(self, commands):
""" Load configure by cli """
if not self.module.check_mode:
load_config(self.module, commands)
def get_snmp_version(self):
""" Get snmp version """
version = None
conf_str = CE_GET_SNMP_VERSION
recv_xml = self.netconf_get_config(conf_str=conf_str)
if "<data/>" in recv_xml:
pass
else:
xml_str = recv_xml.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
version_info = root.find("data/snmp/engine")
if version_info:
for site in version_info:
if site.tag in ["version"]:
version = site.text
return version
def cli_get_connect_port(self):
""" Get connect port by cli """
regular = "| include snmp | include snmp-agent udp-port"
flags = list()
flags.append(regular)
tmp_cfg = get_config(self.module, flags)
return tmp_cfg
def get_proposed(self):
""" Get proposed state """
self.proposed["state"] = self.state
if self.version:
self.proposed["version"] = self.version
if self.connect_port:
self.proposed["connect_port"] = self.connect_port
if self.host_name:
self.proposed["host_name"] = self.host_name
if self.address:
self.proposed["address"] = self.address
if self.notify_type:
self.proposed["notify_type"] = self.notify_type
if self.vpn_name:
self.proposed["vpn_name"] = self.vpn_name
if self.recv_port:
self.proposed["recv_port"] = self.recv_port
if self.security_model:
self.proposed["security_model"] = self.security_model
if self.security_name:
self.proposed["security_name"] = "******"
if self.security_name_v3:
self.proposed["security_name_v3"] = self.security_name_v3
if self.security_level:
self.proposed["security_level"] = self.security_level
if self.is_public_net != 'no_use':
self.proposed["is_public_net"] = self.is_public_net
if self.interface_name:
self.proposed["interface_name"] = self.interface_name
def get_existing(self):
""" Get existing state """
if self.version:
version = self.get_snmp_version()
if version:
self.cur_cli_cfg["version"] = version
self.existing["version"] = version
if self.connect_port:
tmp_cfg = self.cli_get_connect_port()
if tmp_cfg:
temp_data = tmp_cfg.split(r"udp-port ")
self.cur_cli_cfg["connect port"] = temp_data[1]
self.existing["connect port"] = temp_data[1]
if self.host_name:
self.existing["target host info"] = self.cur_netconf_cfg[
"target_host_info"]
def get_end_state(self):
""" Get end state """
if self.version:
version = self.get_snmp_version()
if version:
self.end_state["version"] = version
if self.connect_port:
tmp_cfg = self.cli_get_connect_port()
if tmp_cfg:
temp_data = tmp_cfg.split(r"udp-port ")
self.end_state["connect port"] = temp_data[1]
if self.host_name:
self.end_state["target host info"] = self.end_netconf_cfg[
"target_host_info"]
def config_version_cli(self):
""" Config version by cli """
if "disable" in self.cur_cli_cfg["version"]:
cmd = "snmp-agent sys-info version %s" % self.version
self.updates_cmd.append(cmd)
cmds = list()
cmds.append(cmd)
self.cli_load_config(cmds)
self.changed = True
else:
if self.version != self.cur_cli_cfg["version"]:
cmd = "snmp-agent sys-info version %s disable" % self.cur_cli_cfg[
"version"]
self.updates_cmd.append(cmd)
cmd = "snmp-agent sys-info version %s" % self.version
self.updates_cmd.append(cmd)
cmds = list()
cmds.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def undo_config_version_cli(self):
""" Undo config version by cli """
if "disable" in self.cur_cli_cfg["version"]:
pass
else:
cmd = "snmp-agent sys-info version %s disable" % self.cur_cli_cfg[
"version"]
cmds = list()
cmds.append(cmd)
self.updates_cmd.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def config_connect_port_cli(self):
""" Config connect port by cli """
if "connect port" in self.cur_cli_cfg.keys():
if self.cur_cli_cfg["connect port"] == self.connect_port:
pass
else:
cmd = "snmp-agent udp-port %s" % self.connect_port
cmds = list()
cmds.append(cmd)
self.updates_cmd.append(cmd)
self.cli_load_config(cmds)
self.changed = True
else:
cmd = "snmp-agent udp-port %s" % self.connect_port
cmds = list()
cmds.append(cmd)
self.updates_cmd.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def undo_config_connect_port_cli(self):
""" Undo config connect port by cli """
if "connect port" in self.cur_cli_cfg.keys():
if not self.cur_cli_cfg["connect port"]:
pass
else:
cmd = "undo snmp-agent udp-port"
cmds = list()
cmds.append(cmd)
self.updates_cmd.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def merge_snmp_target_host(self):
""" Merge snmp target host operation """
conf_str = CE_MERGE_SNMP_TARGET_HOST_HEADER % self.host_name
if self.domain:
conf_str += "<domain>%s</domain>" % self.domain
if self.address:
conf_str += "<address>%s</address>" % self.address
if self.notify_type:
conf_str += "<notifyType>%s</notifyType>" % self.notify_type
if self.vpn_name:
conf_str += "<vpnInstanceName>%s</vpnInstanceName>" % self.vpn_name
if self.recv_port:
conf_str += "<portNumber>%s</portNumber>" % self.recv_port
if self.security_model:
conf_str += "<securityModel>%s</securityModel>" % self.security_model
if self.security_name:
conf_str += "<securityName>%s</securityName>" % self.security_name
if self.security_name_v3:
conf_str += "<securityNameV3>%s</securityNameV3>" % self.security_name_v3
if self.security_level:
conf_str += "<securityLevel>%s</securityLevel>" % self.security_level
if self.is_public_net != 'no_use':
conf_str += "<isPublicNet>%s</isPublicNet>" % self.is_public_net
if self.interface_name:
conf_str += "<interface-name>%s</interface-name>" % self.interface_name
conf_str += CE_MERGE_SNMP_TARGET_HOST_TAIL
recv_xml = self.netconf_set_config(conf_str=conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(msg='Error: Merge snmp target host failed.')
cmd = "snmp-agent target-host host-name %s " % self.host_name
cmd += "%s " % self.notify_type
cmd += "address udp-domain %s " % self.address
if self.recv_port:
cmd += "udp-port %s " % self.recv_port
if self.interface_name:
cmd += "source %s " % self.interface_name
if self.vpn_name:
cmd += "vpn-instance %s " % self.vpn_name
if self.is_public_net == "true":
cmd += "public-net "
if self.security_model in ["v1", "v2c"] and self.security_name:
cmd += "params securityname %s %s " % (
"******", self.security_model)
if self.security_model == "v3" and self.security_name_v3:
cmd += "params securityname %s %s " % (
self.security_name_v3, self.security_model)
        if self.security_level and self.security_level in ["authentication", "privacy"]:
cmd += "%s" % self.security_level
self.changed = True
self.updates_cmd.append(cmd)
def delete_snmp_target_host(self):
""" Delete snmp target host operation """
conf_str = CE_DELETE_SNMP_TARGET_HOST_HEADER % self.host_name
if self.domain:
conf_str += "<domain>%s</domain>" % self.domain
if self.address:
conf_str += "<address>%s</address>" % self.address
if self.notify_type:
conf_str += "<notifyType>%s</notifyType>" % self.notify_type
if self.vpn_name:
conf_str += "<vpnInstanceName>%s</vpnInstanceName>" % self.vpn_name
if self.recv_port:
conf_str += "<portNumber>%s</portNumber>" % self.recv_port
if self.security_model:
conf_str += "<securityModel>%s</securityModel>" % self.security_model
if self.security_name:
conf_str += "<securityName>%s</securityName>" % self.security_name
if self.security_name_v3:
conf_str += "<securityNameV3>%s</securityNameV3>" % self.security_name_v3
if self.security_level:
conf_str += "<securityLevel>%s</securityLevel>" % self.security_level
if self.is_public_net != 'no_use':
conf_str += "<isPublicNet>%s</isPublicNet>" % self.is_public_net
if self.interface_name:
conf_str += "<interface-name>%s</interface-name>" % self.interface_name
conf_str += CE_DELETE_SNMP_TARGET_HOST_TAIL
recv_xml = self.netconf_set_config(conf_str=conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(msg='Error: Delete snmp target host failed.')
if not self.address:
cmd = "undo snmp-agent target-host host-name %s " % self.host_name
else:
cmd = "undo snmp-agent target-host trap address udp-domain %s " % self.address
if self.recv_port:
cmd += "udp-port %s " % self.recv_port
if self.interface_name:
cmd += "source %s " % self.interface_name
if self.vpn_name:
cmd += "vpn-instance %s " % self.vpn_name
if self.is_public_net == "true":
cmd += "public-net "
if self.security_model in ["v1", "v2c"] and self.security_name:
cmd += "params securityname %s" % "******"
if self.security_model == "v3" and self.security_name_v3:
cmd += "params securityname %s" % self.security_name_v3
self.changed = True
self.updates_cmd.append(cmd)
def merge_snmp_version(self):
""" Merge snmp version operation """
conf_str = CE_MERGE_SNMP_VERSION % self.version
recv_xml = self.netconf_set_config(conf_str=conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(msg='Error: Merge snmp version failed.')
if self.version == "none":
cmd = "snmp-agent sys-info version %s disable" % self.cur_cli_cfg[
"version"]
self.updates_cmd.append(cmd)
elif self.version == "v1v2c":
cmd = "snmp-agent sys-info version v1"
self.updates_cmd.append(cmd)
cmd = "snmp-agent sys-info version v2c"
self.updates_cmd.append(cmd)
elif self.version == "v1v3":
cmd = "snmp-agent sys-info version v1"
self.updates_cmd.append(cmd)
cmd = "snmp-agent sys-info version v3"
self.updates_cmd.append(cmd)
elif self.version == "v2cv3":
cmd = "snmp-agent sys-info version v2c"
self.updates_cmd.append(cmd)
cmd = "snmp-agent sys-info version v3"
self.updates_cmd.append(cmd)
else:
cmd = "snmp-agent sys-info version %s" % self.version
self.updates_cmd.append(cmd)
self.changed = True
def work(self):
""" Main work function """
self.check_cli_args()
self.check_netconf_args(self.cur_netconf_cfg)
self.get_proposed()
self.get_existing()
if self.state == "present":
if self.version:
if self.version != self.cur_cli_cfg["version"]:
self.merge_snmp_version()
if self.connect_port:
self.config_connect_port_cli()
if self.cur_netconf_cfg["need_cfg"]:
self.merge_snmp_target_host()
else:
if self.connect_port:
self.undo_config_connect_port_cli()
if self.cur_netconf_cfg["need_cfg"]:
self.delete_snmp_target_host()
self.check_netconf_args(self.end_netconf_cfg)
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
self.results['updates'] = self.updates_cmd
self.module.exit_json(**self.results)
def main():
""" Module main """
argument_spec = dict(
state=dict(choices=['present', 'absent'], default='present'),
version=dict(choices=['none', 'v1', 'v2c', 'v3',
'v1v2c', 'v1v3', 'v2cv3', 'all']),
connect_port=dict(type='str'),
host_name=dict(type='str'),
address=dict(type='str'),
notify_type=dict(choices=['trap', 'inform']),
vpn_name=dict(type='str'),
recv_port=dict(type='str'),
security_model=dict(choices=['v1', 'v2c', 'v3']),
security_name=dict(type='str', no_log=True),
security_name_v3=dict(type='str'),
security_level=dict(
choices=['noAuthNoPriv', 'authentication', 'privacy']),
is_public_net=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
interface_name=dict(type='str')
)
argument_spec.update(ce_argument_spec)
module = SnmpTargetHost(argument_spec=argument_spec)
module.work()
if __name__ == '__main__':
main()
| gpl-3.0 |
cosenal/osf.io | website/archiver/model.py | 38 | 5944 | import datetime
from modularodm import fields
from framework.mongo import ObjectId
from framework.mongo import StoredObject
from website.archiver import (
ARCHIVER_INITIATED,
ARCHIVER_SUCCESS,
ARCHIVER_FAILURE,
ARCHIVER_FAILURE_STATUSES
)
from website.addons.base import StorageAddonBase
from website import settings
class ArchiveTarget(StoredObject):
"""Stores the results of archiving a single addon
"""
_id = fields.StringField(
primary=True,
default=lambda: str(ObjectId())
)
# addon_short_name of target addon
name = fields.StringField()
status = fields.StringField(default=ARCHIVER_INITIATED)
# <dict> representation of a website.archiver.AggregateStatResult
# Format: {
# 'target_id': <str>,
# 'target_name': <str>,
# 'targets': <list>(StatResult | AggregateStatResult),
# 'num_files': <int>,
# 'disk_usage': <float>,
# }
stat_result = fields.DictionaryField()
errors = fields.StringField(list=True)
def __repr__(self):
return '<{0}(_id={1}, name={2}, status={3})>'.format(
self.__class__.__name__,
self._id,
self.name,
self.status
)
class ArchiveJob(StoredObject):
_id = fields.StringField(
primary=True,
default=lambda: str(ObjectId())
)
# whether or not the ArchiveJob is complete (success or fail)
done = fields.BooleanField(default=False)
# whether or not emails have been sent for this ArchiveJob
sent = fields.BooleanField(default=False)
status = fields.StringField(default=ARCHIVER_INITIATED)
datetime_initiated = fields.DateTimeField(default=datetime.datetime.utcnow)
dst_node = fields.ForeignField('node', backref='active')
src_node = fields.ForeignField('node')
initiator = fields.ForeignField('user')
target_addons = fields.ForeignField('archivetarget', list=True)
def __repr__(self):
return (
'<{ClassName}(_id={self._id}, done={self.done}, '
' status={self.status}, src_node={self.src_node}, dst_node={self.dst_node})>'
).format(ClassName=self.__class__.__name__, self=self)
@property
def children(self):
return [node.archive_job for node in self.dst_node.nodes if node.primary]
@property
def parent(self):
parent_node = self.dst_node.parent_node
return parent_node.archive_job if parent_node else None
@property
def success(self):
return self.status == ARCHIVER_SUCCESS
@property
def pending(self):
return any([
target for target in self.target_addons
if target.status not in (ARCHIVER_SUCCESS, ARCHIVER_FAILURE)
])
def info(self):
return self.src_node, self.dst_node, self.initiator
def target_info(self):
return [
{
'name': target.name,
'status': target.status,
'stat_result': target.stat_result,
'errors': target.errors
}
for target in self.target_addons
]
def archive_tree_finished(self):
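        # Descriptive note: truthy only when this job is no longer pending
        # and every child's subtree has also finished; a node without
        # children short-circuits to True.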
if not self.pending:
return len(
[
ret for ret in [
child.archive_tree_finished()
for child in self.children
] if ret]
) if len(self.children) else True
return False
def _fail_above(self):
"""Marks all ArchiveJob instances attached to Nodes above this as failed
"""
parent = self.parent
if parent:
parent.status = ARCHIVER_FAILURE
parent.save()
def _post_update_target(self):
"""Checks for success or failure if the ArchiveJob on self.dst_node
is finished
"""
if self.status == ARCHIVER_FAILURE:
return
if not self.pending:
self.done = True
if any([target.status for target in self.target_addons if target.status in ARCHIVER_FAILURE_STATUSES]):
self.status = ARCHIVER_FAILURE
self._fail_above()
else:
self.status = ARCHIVER_SUCCESS
self.save()
def get_target(self, addon_short_name):
try:
return [addon for addon in self.target_addons if addon.name == addon_short_name][0]
except IndexError:
return None
def _set_target(self, addon_short_name):
if self.get_target(addon_short_name):
return
target = ArchiveTarget(name=addon_short_name)
target.save()
self.target_addons.append(target)
def set_targets(self):
addons = []
for addon in [self.src_node.get_addon(name)
for name in settings.ADDONS_ARCHIVABLE
if settings.ADDONS_ARCHIVABLE[name] != 'none']:
if not addon or not addon.complete or not isinstance(addon, StorageAddonBase):
continue
archive_errors = getattr(addon, 'archive_errors', None)
if not archive_errors or (archive_errors and not archive_errors()):
if addon.config.short_name == 'dataverse':
addons.append(addon.config.short_name + '-draft')
addons.append(addon.config.short_name + '-published')
else:
addons.append(addon.config.short_name)
for addon in addons:
self._set_target(addon)
self.save()
def update_target(self, addon_short_name, status, stat_result=None, errors=None):
stat_result = stat_result or {}
errors = errors or []
target = self.get_target(addon_short_name)
target.status = status
target.errors = errors
target.stat_result = stat_result
target.save()
self._post_update_target()
| apache-2.0 |
benssson/flatbuffers | tests/py_test.py | 16 | 49316 | # coding=utf-8
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import sys
PY_VERSION = sys.version_info[:2]
import ctypes
from collections import defaultdict
import timeit
import unittest
from flatbuffers import compat
from flatbuffers.compat import range_func as compat_range
import flatbuffers
from flatbuffers import number_types as N
import MyGame # refers to generated code
import MyGame.Example # refers to generated code
import MyGame.Example.Any # refers to generated code
import MyGame.Example.Color # refers to generated code
import MyGame.Example.Monster # refers to generated code
import MyGame.Example.Test # refers to generated code
import MyGame.Example.Stat # refers to generated code
import MyGame.Example.Vec3 # refers to generated code
def assertRaises(test_case, fn, exception_class):
''' Backwards-compatible assertion for exceptions raised. '''
exc = None
try:
fn()
except Exception as e:
exc = e
test_case.assertTrue(exc is not None)
test_case.assertTrue(isinstance(exc, exception_class))
class TestWireFormat(unittest.TestCase):
def test_wire_format(self):
# Verify that using the generated Python code builds a buffer without
# returning errors, and is interpreted correctly:
gen_buf, gen_off = make_monster_from_generated_code()
CheckReadBuffer(gen_buf, gen_off)
# Verify that the canonical flatbuffer file is readable by the
# generated Python code. Note that context managers are not part of
# Python 2.5, so we use the simpler open/close methods here:
f = open('monsterdata_test.mon', 'rb')
canonicalWireData = f.read()
f.close()
CheckReadBuffer(bytearray(canonicalWireData), 0)
# Write the generated buffer out to a file:
f = open('monsterdata_python_wire.mon', 'wb')
f.write(gen_buf[gen_off:])
f.close()
def CheckReadBuffer(buf, offset):
''' CheckReadBuffer checks that the given buffer is evaluated correctly
as the example Monster. '''
def asserter(stmt):
''' An assertion helper that is separated from TestCase classes. '''
if not stmt:
raise AssertionError('CheckReadBuffer case failed')
monster = MyGame.Example.Monster.Monster.GetRootAsMonster(buf, offset)
asserter(monster.Hp() == 80)
asserter(monster.Mana() == 150)
asserter(monster.Name() == b'MyMonster')
# initialize a Vec3 from Pos()
vec = monster.Pos()
asserter(vec is not None)
# verify the properties of the Vec3
asserter(vec.X() == 1.0)
asserter(vec.Y() == 2.0)
asserter(vec.Z() == 3.0)
asserter(vec.Test1() == 3.0)
asserter(vec.Test2() == 2)
# initialize a Test from Test3(...)
t = MyGame.Example.Test.Test()
t = vec.Test3(t)
asserter(t is not None)
# verify the properties of the Test
asserter(t.A() == 5)
asserter(t.B() == 6)
# verify that the enum code matches the enum declaration:
union_type = MyGame.Example.Any.Any
asserter(monster.TestType() == union_type.Monster)
# initialize a Table from a union field Test(...)
table2 = monster.Test()
asserter(type(table2) is flatbuffers.table.Table)
# initialize a Monster from the Table from the union
monster2 = MyGame.Example.Monster.Monster()
monster2.Init(table2.Bytes, table2.Pos)
asserter(monster2.Name() == b"Fred")
# iterate through the first monster's inventory:
asserter(monster.InventoryLength() == 5)
invsum = 0
for i in compat_range(monster.InventoryLength()):
v = monster.Inventory(i)
invsum += int(v)
asserter(invsum == 10)
asserter(monster.Test4Length() == 2)
# create a 'Test' object and populate it:
test0 = monster.Test4(0)
asserter(type(test0) is MyGame.Example.Test.Test)
test1 = monster.Test4(1)
asserter(type(test1) is MyGame.Example.Test.Test)
    # the positions of test0 and test1 are swapped in monsterdata_java_wire
# and monsterdata_test_wire, so ignore ordering
v0 = test0.A()
v1 = test0.B()
v2 = test1.A()
v3 = test1.B()
sumtest12 = int(v0) + int(v1) + int(v2) + int(v3)
asserter(sumtest12 == 100)
asserter(monster.TestarrayofstringLength() == 2)
asserter(monster.Testarrayofstring(0) == b"test1")
asserter(monster.Testarrayofstring(1) == b"test2")
asserter(monster.TestarrayoftablesLength() == 0)
asserter(monster.TestnestedflatbufferLength() == 0)
asserter(monster.Testempty() is None)
class TestFuzz(unittest.TestCase):
''' Low level stress/fuzz test: serialize/deserialize a variety of
different kinds of data in different combinations '''
binary_type = compat.binary_types[0] # this will always exist
ofInt32Bytes = binary_type([0x83, 0x33, 0x33, 0x33])
ofInt64Bytes = binary_type([0x84, 0x44, 0x44, 0x44,
0x44, 0x44, 0x44, 0x44])
overflowingInt32Val = flatbuffers.encode.Get(flatbuffers.packer.int32,
ofInt32Bytes, 0)
overflowingInt64Val = flatbuffers.encode.Get(flatbuffers.packer.int64,
ofInt64Bytes, 0)
# Values we're testing against: chosen to ensure no bits get chopped
    # off anywhere, and also be different from each other.
boolVal = True
int8Val = N.Int8Flags.py_type(-127) # 0x81
uint8Val = N.Uint8Flags.py_type(0xFF)
int16Val = N.Int16Flags.py_type(-32222) # 0x8222
uint16Val = N.Uint16Flags.py_type(0xFEEE)
int32Val = N.Int32Flags.py_type(overflowingInt32Val)
uint32Val = N.Uint32Flags.py_type(0xFDDDDDDD)
int64Val = N.Int64Flags.py_type(overflowingInt64Val)
uint64Val = N.Uint64Flags.py_type(0xFCCCCCCCCCCCCCCC)
# Python uses doubles, so force it here
float32Val = N.Float32Flags.py_type(ctypes.c_float(3.14159).value)
float64Val = N.Float64Flags.py_type(3.14159265359)
def test_fuzz(self):
return self.check_once(11, 100)
def check_once(self, fuzzFields, fuzzObjects):
testValuesMax = 11 # hardcoded to the number of scalar types
builder = flatbuffers.Builder(0)
l = LCG()
objects = [0 for _ in compat_range(fuzzObjects)]
# Generate fuzzObjects random objects each consisting of
# fuzzFields fields, each of a random type.
for i in compat_range(fuzzObjects):
builder.StartObject(fuzzFields)
for j in compat_range(fuzzFields):
choice = int(l.Next()) % testValuesMax
if choice == 0:
builder.PrependBoolSlot(int(j), self.boolVal, False)
elif choice == 1:
builder.PrependInt8Slot(int(j), self.int8Val, 0)
elif choice == 2:
builder.PrependUint8Slot(int(j), self.uint8Val, 0)
elif choice == 3:
builder.PrependInt16Slot(int(j), self.int16Val, 0)
elif choice == 4:
builder.PrependUint16Slot(int(j), self.uint16Val, 0)
elif choice == 5:
builder.PrependInt32Slot(int(j), self.int32Val, 0)
elif choice == 6:
builder.PrependUint32Slot(int(j), self.uint32Val, 0)
elif choice == 7:
builder.PrependInt64Slot(int(j), self.int64Val, 0)
elif choice == 8:
builder.PrependUint64Slot(int(j), self.uint64Val, 0)
elif choice == 9:
builder.PrependFloat32Slot(int(j), self.float32Val, 0)
elif choice == 10:
builder.PrependFloat64Slot(int(j), self.float64Val, 0)
else:
raise RuntimeError('unreachable')
off = builder.EndObject()
# store the offset from the end of the builder buffer,
# since it will keep growing:
objects[i] = off
# Do some bookkeeping to generate stats on fuzzes:
stats = defaultdict(int)
def check(table, desc, want, got):
stats[desc] += 1
self.assertEqual(want, got, "%s != %s, %s" % (want, got, desc))
l = LCG() # Reset.
# Test that all objects we generated are readable and return the
# expected values. We generate random objects in the same order
# so this is deterministic.
for i in compat_range(fuzzObjects):
table = flatbuffers.table.Table(builder.Bytes,
len(builder.Bytes) - objects[i])
for j in compat_range(fuzzFields):
field_count = flatbuffers.builder.VtableMetadataFields + j
f = N.VOffsetTFlags.py_type(field_count *
N.VOffsetTFlags.bytewidth)
choice = int(l.Next()) % testValuesMax
if choice == 0:
check(table, "bool", self.boolVal,
table.GetSlot(f, False, N.BoolFlags))
elif choice == 1:
check(table, "int8", self.int8Val,
table.GetSlot(f, 0, N.Int8Flags))
elif choice == 2:
check(table, "uint8", self.uint8Val,
table.GetSlot(f, 0, N.Uint8Flags))
elif choice == 3:
check(table, "int16", self.int16Val,
table.GetSlot(f, 0, N.Int16Flags))
elif choice == 4:
check(table, "uint16", self.uint16Val,
table.GetSlot(f, 0, N.Uint16Flags))
elif choice == 5:
check(table, "int32", self.int32Val,
table.GetSlot(f, 0, N.Int32Flags))
elif choice == 6:
check(table, "uint32", self.uint32Val,
table.GetSlot(f, 0, N.Uint32Flags))
elif choice == 7:
check(table, "int64", self.int64Val,
table.GetSlot(f, 0, N.Int64Flags))
elif choice == 8:
check(table, "uint64", self.uint64Val,
table.GetSlot(f, 0, N.Uint64Flags))
elif choice == 9:
check(table, "float32", self.float32Val,
table.GetSlot(f, 0, N.Float32Flags))
elif choice == 10:
check(table, "float64", self.float64Val,
table.GetSlot(f, 0, N.Float64Flags))
else:
raise RuntimeError('unreachable')
# If enough checks were made, verify that all scalar types were used:
self.assertEqual(testValuesMax, len(stats),
"fuzzing failed to test all scalar types: %s" % stats)
class TestByteLayout(unittest.TestCase):
''' TestByteLayout checks the bytes of a Builder in various scenarios. '''
def assertBuilderEquals(self, builder, want_chars_or_ints):
def integerize(x):
if isinstance(x, compat.string_types):
return ord(x)
return x
want_ints = list(map(integerize, want_chars_or_ints))
want = bytearray(want_ints)
got = builder.Bytes[builder.Head():] # use the buffer directly
self.assertEqual(want, got)
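    # Editorial note (hedged): the expected-byte lists below may freely mix
    # single characters and integers; integerize() above maps, for example,
    # ['f', 'o', 'o', 0] to [102, 111, 111, 0] before comparing.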
def test_numbers(self):
b = flatbuffers.Builder(0)
self.assertBuilderEquals(b, [])
b.PrependBool(True)
self.assertBuilderEquals(b, [1])
b.PrependInt8(-127)
self.assertBuilderEquals(b, [129, 1])
b.PrependUint8(255)
self.assertBuilderEquals(b, [255, 129, 1])
b.PrependInt16(-32222)
self.assertBuilderEquals(b, [0x22, 0x82, 0, 255, 129, 1]) # first pad
b.PrependUint16(0xFEEE)
# no pad this time:
self.assertBuilderEquals(b, [0xEE, 0xFE, 0x22, 0x82, 0, 255, 129, 1])
b.PrependInt32(-53687092)
self.assertBuilderEquals(b, [204, 204, 204, 252, 0xEE, 0xFE,
0x22, 0x82, 0, 255, 129, 1])
b.PrependUint32(0x98765432)
self.assertBuilderEquals(b, [0x32, 0x54, 0x76, 0x98,
204, 204, 204, 252,
0xEE, 0xFE, 0x22, 0x82,
0, 255, 129, 1])
def test_numbers64(self):
b = flatbuffers.Builder(0)
b.PrependUint64(0x1122334455667788)
self.assertBuilderEquals(b, [0x88, 0x77, 0x66, 0x55,
0x44, 0x33, 0x22, 0x11])
b = flatbuffers.Builder(0)
b.PrependInt64(0x1122334455667788)
self.assertBuilderEquals(b, [0x88, 0x77, 0x66, 0x55,
0x44, 0x33, 0x22, 0x11])
def test_1xbyte_vector(self):
b = flatbuffers.Builder(0)
self.assertBuilderEquals(b, [])
b.StartVector(flatbuffers.number_types.Uint8Flags.bytewidth, 1, 1)
        self.assertBuilderEquals(b, [0, 0, 0]) # align to 4 bytes
b.PrependByte(1)
self.assertBuilderEquals(b, [1, 0, 0, 0])
b.EndVector(1)
self.assertBuilderEquals(b, [1, 0, 0, 0, 1, 0, 0, 0]) # padding
def test_2xbyte_vector(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Uint8Flags.bytewidth, 2, 1)
        self.assertBuilderEquals(b, [0, 0]) # align to 4 bytes
b.PrependByte(1)
self.assertBuilderEquals(b, [1, 0, 0])
b.PrependByte(2)
self.assertBuilderEquals(b, [2, 1, 0, 0])
b.EndVector(2)
self.assertBuilderEquals(b, [2, 0, 0, 0, 2, 1, 0, 0]) # padding
def test_1xuint16_vector(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Uint16Flags.bytewidth, 1, 1)
        self.assertBuilderEquals(b, [0, 0]) # align to 4 bytes
b.PrependUint16(1)
self.assertBuilderEquals(b, [1, 0, 0, 0])
b.EndVector(1)
self.assertBuilderEquals(b, [1, 0, 0, 0, 1, 0, 0, 0]) # padding
def test_2xuint16_vector(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Uint16Flags.bytewidth, 2, 1)
        self.assertBuilderEquals(b, []) # align to 4 bytes
b.PrependUint16(0xABCD)
self.assertBuilderEquals(b, [0xCD, 0xAB])
b.PrependUint16(0xDCBA)
self.assertBuilderEquals(b, [0xBA, 0xDC, 0xCD, 0xAB])
b.EndVector(2)
self.assertBuilderEquals(b, [2, 0, 0, 0, 0xBA, 0xDC, 0xCD, 0xAB])
def test_create_ascii_string(self):
b = flatbuffers.Builder(0)
b.CreateString(u"foo", encoding='ascii')
# 0-terminated, no pad:
self.assertBuilderEquals(b, [3, 0, 0, 0, 'f', 'o', 'o', 0])
b.CreateString(u"moop", encoding='ascii')
# 0-terminated, 3-byte pad:
self.assertBuilderEquals(b, [4, 0, 0, 0, 'm', 'o', 'o', 'p',
0, 0, 0, 0,
3, 0, 0, 0, 'f', 'o', 'o', 0])
def test_create_utf8_string(self):
b = flatbuffers.Builder(0)
b.CreateString(u"Цлїςσδε")
self.assertBuilderEquals(b, "\x0e\x00\x00\x00\xd0\xa6\xd0\xbb\xd1\x97" \
"\xcf\x82\xcf\x83\xce\xb4\xce\xb5\x00\x00")
b.CreateString(u"フムアムカモケモ")
self.assertBuilderEquals(b, "\x18\x00\x00\x00\xef\xbe\x8c\xef\xbe\x91" \
"\xef\xbd\xb1\xef\xbe\x91\xef\xbd\xb6\xef\xbe\x93\xef\xbd\xb9\xef" \
"\xbe\x93\x00\x00\x00\x00\x0e\x00\x00\x00\xd0\xa6\xd0\xbb\xd1\x97" \
"\xcf\x82\xcf\x83\xce\xb4\xce\xb5\x00\x00")
def test_create_arbitrary_string(self):
b = flatbuffers.Builder(0)
s = "\x01\x02\x03"
b.CreateString(s) # Default encoding is utf-8.
# 0-terminated, no pad:
self.assertBuilderEquals(b, [3, 0, 0, 0, 1, 2, 3, 0])
s2 = "\x04\x05\x06\x07"
b.CreateString(s2) # Default encoding is utf-8.
# 0-terminated, 3-byte pad:
self.assertBuilderEquals(b, [4, 0, 0, 0, 4, 5, 6, 7, 0, 0, 0, 0,
3, 0, 0, 0, 1, 2, 3, 0])
def test_empty_vtable(self):
b = flatbuffers.Builder(0)
b.StartObject(0)
self.assertBuilderEquals(b, [])
b.EndObject()
self.assertBuilderEquals(b, [4, 0, 4, 0, 4, 0, 0, 0])
def test_vtable_with_one_true_bool(self):
b = flatbuffers.Builder(0)
self.assertBuilderEquals(b, [])
b.StartObject(1)
self.assertBuilderEquals(b, [])
b.PrependBoolSlot(0, True, False)
b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
8, 0, # length of object including vtable offset
7, 0, # start of bool value
6, 0, 0, 0, # offset for start of vtable (int32)
0, 0, 0, # padded to 4 bytes
1, # bool value
])
def test_vtable_with_one_default_bool(self):
b = flatbuffers.Builder(0)
self.assertBuilderEquals(b, [])
b.StartObject(1)
self.assertBuilderEquals(b, [])
b.PrependBoolSlot(0, False, False)
b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
4, 0, # end of object from here
0, 0, # entry 1 is zero
6, 0, 0, 0, # offset for start of vtable (int32)
])
def test_vtable_with_one_int16(self):
b = flatbuffers.Builder(0)
b.StartObject(1)
b.PrependInt16Slot(0, 0x789A, 0)
b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
8, 0, # end of object from here
6, 0, # offset to value
6, 0, 0, 0, # offset for start of vtable (int32)
0, 0, # padding to 4 bytes
0x9A, 0x78,
])
def test_vtable_with_two_int16(self):
b = flatbuffers.Builder(0)
b.StartObject(2)
b.PrependInt16Slot(0, 0x3456, 0)
b.PrependInt16Slot(1, 0x789A, 0)
b.EndObject()
self.assertBuilderEquals(b, [
8, 0, # vtable bytes
8, 0, # end of object from here
6, 0, # offset to value 0
4, 0, # offset to value 1
8, 0, 0, 0, # offset for start of vtable (int32)
0x9A, 0x78, # value 1
0x56, 0x34, # value 0
])
def test_vtable_with_int16_and_bool(self):
b = flatbuffers.Builder(0)
b.StartObject(2)
b.PrependInt16Slot(0, 0x3456, 0)
b.PrependBoolSlot(1, True, False)
b.EndObject()
self.assertBuilderEquals(b, [
8, 0, # vtable bytes
8, 0, # end of object from here
6, 0, # offset to value 0
5, 0, # offset to value 1
8, 0, 0, 0, # offset for start of vtable (int32)
0, # padding
1, # value 1
0x56, 0x34, # value 0
])
def test_vtable_with_empty_vector(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Uint8Flags.bytewidth, 0, 1)
vecend = b.EndVector(0)
b.StartObject(1)
b.PrependUOffsetTRelativeSlot(0, vecend, 0)
b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
8, 0,
4, 0, # offset to vector offset
6, 0, 0, 0, # offset for start of vtable (int32)
4, 0, 0, 0,
0, 0, 0, 0, # length of vector (not in struct)
])
def test_vtable_with_empty_vector_of_byte_and_some_scalars(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Uint8Flags.bytewidth, 0, 1)
vecend = b.EndVector(0)
b.StartObject(2)
b.PrependInt16Slot(0, 55, 0)
b.PrependUOffsetTRelativeSlot(1, vecend, 0)
b.EndObject()
self.assertBuilderEquals(b, [
8, 0, # vtable bytes
12, 0,
10, 0, # offset to value 0
4, 0, # offset to vector offset
8, 0, 0, 0, # vtable loc
8, 0, 0, 0, # value 1
0, 0, 55, 0, # value 0
0, 0, 0, 0, # length of vector (not in struct)
])
def test_vtable_with_1_int16_and_2vector_of_int16(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Int16Flags.bytewidth, 2, 1)
b.PrependInt16(0x1234)
b.PrependInt16(0x5678)
vecend = b.EndVector(2)
b.StartObject(2)
b.PrependUOffsetTRelativeSlot(1, vecend, 0)
b.PrependInt16Slot(0, 55, 0)
b.EndObject()
self.assertBuilderEquals(b, [
8, 0, # vtable bytes
12, 0, # length of object
6, 0, # start of value 0 from end of vtable
8, 0, # start of value 1 from end of buffer
8, 0, 0, 0, # offset for start of vtable (int32)
0, 0, # padding
55, 0, # value 0
4, 0, 0, 0, # vector position from here
2, 0, 0, 0, # length of vector (uint32)
0x78, 0x56, # vector value 1
0x34, 0x12, # vector value 0
])
def test_vtable_with_1_struct_of_1_int8__1_int16__1_int32(self):
b = flatbuffers.Builder(0)
b.StartObject(1)
b.Prep(4+4+4, 0)
b.PrependInt8(55)
b.Pad(3)
b.PrependInt16(0x1234)
b.Pad(2)
b.PrependInt32(0x12345678)
structStart = b.Offset()
b.PrependStructSlot(0, structStart, 0)
b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
16, 0, # end of object from here
4, 0, # start of struct from here
6, 0, 0, 0, # offset for start of vtable (int32)
0x78, 0x56, 0x34, 0x12, # value 2
0, 0, # padding
0x34, 0x12, # value 1
0, 0, 0, # padding
55, # value 0
])
def test_vtable_with_1_vector_of_2_struct_of_2_int8(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Int8Flags.bytewidth*2, 2, 1)
b.PrependInt8(33)
b.PrependInt8(44)
b.PrependInt8(55)
b.PrependInt8(66)
vecend = b.EndVector(2)
b.StartObject(1)
b.PrependUOffsetTRelativeSlot(0, vecend, 0)
b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
8, 0,
4, 0, # offset of vector offset
6, 0, 0, 0, # offset for start of vtable (int32)
4, 0, 0, 0, # vector start offset
2, 0, 0, 0, # vector length
66, # vector value 1,1
55, # vector value 1,0
44, # vector value 0,1
33, # vector value 0,0
])
def test_table_with_some_elements(self):
b = flatbuffers.Builder(0)
b.StartObject(2)
b.PrependInt8Slot(0, 33, 0)
b.PrependInt16Slot(1, 66, 0)
off = b.EndObject()
b.Finish(off)
self.assertBuilderEquals(b, [
12, 0, 0, 0, # root of table: points to vtable offset
8, 0, # vtable bytes
8, 0, # end of object from here
7, 0, # start of value 0
4, 0, # start of value 1
8, 0, 0, 0, # offset for start of vtable (int32)
66, 0, # value 1
0, # padding
33, # value 0
])
def test__one_unfinished_table_and_one_finished_table(self):
b = flatbuffers.Builder(0)
b.StartObject(2)
b.PrependInt8Slot(0, 33, 0)
b.PrependInt8Slot(1, 44, 0)
off = b.EndObject()
b.Finish(off)
b.StartObject(3)
b.PrependInt8Slot(0, 55, 0)
b.PrependInt8Slot(1, 66, 0)
b.PrependInt8Slot(2, 77, 0)
off = b.EndObject()
b.Finish(off)
self.assertBuilderEquals(b, [
16, 0, 0, 0, # root of table: points to object
0, 0, # padding
10, 0, # vtable bytes
8, 0, # size of object
7, 0, # start of value 0
6, 0, # start of value 1
5, 0, # start of value 2
10, 0, 0, 0, # offset for start of vtable (int32)
0, # padding
77, # value 2
66, # value 1
55, # value 0
12, 0, 0, 0, # root of table: points to object
8, 0, # vtable bytes
8, 0, # size of object
7, 0, # start of value 0
6, 0, # start of value 1
8, 0, 0, 0, # offset for start of vtable (int32)
0, 0, # padding
44, # value 1
33, # value 0
])
def test_a_bunch_of_bools(self):
b = flatbuffers.Builder(0)
b.StartObject(8)
b.PrependBoolSlot(0, True, False)
b.PrependBoolSlot(1, True, False)
b.PrependBoolSlot(2, True, False)
b.PrependBoolSlot(3, True, False)
b.PrependBoolSlot(4, True, False)
b.PrependBoolSlot(5, True, False)
b.PrependBoolSlot(6, True, False)
b.PrependBoolSlot(7, True, False)
off = b.EndObject()
b.Finish(off)
self.assertBuilderEquals(b, [
24, 0, 0, 0, # root of table: points to vtable offset
20, 0, # vtable bytes
12, 0, # size of object
11, 0, # start of value 0
10, 0, # start of value 1
9, 0, # start of value 2
8, 0, # start of value 3
7, 0, # start of value 4
6, 0, # start of value 5
5, 0, # start of value 6
4, 0, # start of value 7
20, 0, 0, 0, # vtable offset
1, # value 7
1, # value 6
1, # value 5
1, # value 4
1, # value 3
1, # value 2
1, # value 1
1, # value 0
])
def test_three_bools(self):
b = flatbuffers.Builder(0)
b.StartObject(3)
b.PrependBoolSlot(0, True, False)
b.PrependBoolSlot(1, True, False)
b.PrependBoolSlot(2, True, False)
off = b.EndObject()
b.Finish(off)
self.assertBuilderEquals(b, [
16, 0, 0, 0, # root of table: points to vtable offset
0, 0, # padding
10, 0, # vtable bytes
8, 0, # size of object
7, 0, # start of value 0
6, 0, # start of value 1
5, 0, # start of value 2
10, 0, 0, 0, # vtable offset from here
0, # padding
1, # value 2
1, # value 1
1, # value 0
])
def test_some_floats(self):
b = flatbuffers.Builder(0)
b.StartObject(1)
b.PrependFloat32Slot(0, 1.0, 0.0)
off = b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
8, 0, # size of object
4, 0, # start of value 0
6, 0, 0, 0, # vtable offset
0, 0, 128, 63, # value 0
])
def make_monster_from_generated_code():
''' Use generated code to build the example Monster. '''
b = flatbuffers.Builder(0)
string = b.CreateString("MyMonster")
test1 = b.CreateString("test1")
test2 = b.CreateString("test2")
fred = b.CreateString("Fred")
MyGame.Example.Monster.MonsterStartInventoryVector(b, 5)
b.PrependByte(4)
b.PrependByte(3)
b.PrependByte(2)
b.PrependByte(1)
b.PrependByte(0)
inv = b.EndVector(5)
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddName(b, fred)
mon2 = MyGame.Example.Monster.MonsterEnd(b)
MyGame.Example.Monster.MonsterStartTest4Vector(b, 2)
MyGame.Example.Test.CreateTest(b, 10, 20)
MyGame.Example.Test.CreateTest(b, 30, 40)
test4 = b.EndVector(2)
MyGame.Example.Monster.MonsterStartTestarrayofstringVector(b, 2)
b.PrependUOffsetTRelative(test2)
b.PrependUOffsetTRelative(test1)
testArrayOfString = b.EndVector(2)
MyGame.Example.Monster.MonsterStart(b)
pos = MyGame.Example.Vec3.CreateVec3(b, 1.0, 2.0, 3.0, 3.0, 2, 5, 6)
MyGame.Example.Monster.MonsterAddPos(b, pos)
MyGame.Example.Monster.MonsterAddHp(b, 80)
MyGame.Example.Monster.MonsterAddName(b, string)
MyGame.Example.Monster.MonsterAddInventory(b, inv)
MyGame.Example.Monster.MonsterAddTestType(b, 1)
MyGame.Example.Monster.MonsterAddTest(b, mon2)
MyGame.Example.Monster.MonsterAddTest4(b, test4)
MyGame.Example.Monster.MonsterAddTestarrayofstring(b, testArrayOfString)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
return b.Bytes, b.Head()
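def _example_read_back_generated_monster():
    ''' Hedged usage sketch (not invoked by the test runner): shows how the
    buffer built above is read back with the generated accessors, mirroring
    the GetRootAsMonster calls used throughout this file. '''
    buf, off = make_monster_from_generated_code()
    mon = MyGame.Example.Monster.Monster.GetRootAsMonster(buf, off)
    assert mon.Hp() == 80  # set via MonsterAddHp above
    assert mon.InventoryLength() == 5  # five bytes were prepended above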
class TestAllCodePathsOfExampleSchema(unittest.TestCase):
def setUp(self, *args, **kwargs):
super(TestAllCodePathsOfExampleSchema, self).setUp(*args, **kwargs)
b = flatbuffers.Builder(0)
MyGame.Example.Monster.MonsterStart(b)
gen_mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(gen_mon)
self.mon = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
def test_default_monster_pos(self):
self.assertTrue(self.mon.Pos() is None)
def test_nondefault_monster_mana(self):
b = flatbuffers.Builder(0)
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddMana(b, 50)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
got_mon = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertEqual(50, got_mon.Mana())
def test_default_monster_hp(self):
self.assertEqual(100, self.mon.Hp())
def test_default_monster_name(self):
self.assertEqual('', self.mon.Name())
def test_default_monster_inventory_item(self):
self.assertEqual(0, self.mon.Inventory(0))
def test_default_monster_inventory_length(self):
self.assertEqual(0, self.mon.InventoryLength())
def test_default_monster_color(self):
self.assertEqual(MyGame.Example.Color.Color.Blue, self.mon.Color())
def test_nondefault_monster_color(self):
b = flatbuffers.Builder(0)
color = MyGame.Example.Color.Color.Red
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddColor(b, color)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertEqual(MyGame.Example.Color.Color.Red, mon2.Color())
def test_default_monster_testtype(self):
self.assertEqual(0, self.mon.TestType())
def test_default_monster_test_field(self):
self.assertEqual(None, self.mon.Test())
def test_default_monster_test4_item(self):
self.assertEqual(None, self.mon.Test4(0))
def test_default_monster_test4_length(self):
self.assertEqual(0, self.mon.Test4Length())
def test_default_monster_testarrayofstring(self):
self.assertEqual("", self.mon.Testarrayofstring(0))
def test_default_monster_testarrayofstring_length(self):
self.assertEqual(0, self.mon.TestarrayofstringLength())
def test_default_monster_testarrayoftables(self):
self.assertEqual(None, self.mon.Testarrayoftables(0))
def test_nondefault_monster_testarrayoftables(self):
b = flatbuffers.Builder(0)
# make a child Monster within a vector of Monsters:
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddHp(b, 99)
sub_monster = MyGame.Example.Monster.MonsterEnd(b)
# build the vector:
MyGame.Example.Monster.MonsterStartTestarrayoftablesVector(b, 1)
b.PrependUOffsetTRelative(sub_monster)
vec = b.EndVector(1)
# make the parent monster and include the vector of Monster:
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddTestarrayoftables(b, vec)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
# inspect the resulting data:
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Output(), 0)
self.assertEqual(99, mon2.Testarrayoftables(0).Hp())
self.assertEqual(1, mon2.TestarrayoftablesLength())
def test_default_monster_testarrayoftables_length(self):
self.assertEqual(0, self.mon.TestarrayoftablesLength())
def test_nondefault_monster_enemy(self):
b = flatbuffers.Builder(0)
# make an Enemy object:
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddHp(b, 88)
enemy = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(enemy)
# make the parent monster and include the vector of Monster:
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddEnemy(b, enemy)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
# inspect the resulting data:
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertEqual(88, mon2.Enemy().Hp())
def test_default_monster_testnestedflatbuffer(self):
self.assertEqual(0, self.mon.Testnestedflatbuffer(0))
def test_default_monster_testnestedflatbuffer_length(self):
self.assertEqual(0, self.mon.TestnestedflatbufferLength())
def test_nondefault_monster_testnestedflatbuffer(self):
b = flatbuffers.Builder(0)
MyGame.Example.Monster.MonsterStartTestnestedflatbufferVector(b, 3)
b.PrependByte(4)
b.PrependByte(2)
b.PrependByte(0)
sub_buf = b.EndVector(3)
# make the parent monster and include the vector of Monster:
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddTestnestedflatbuffer(b, sub_buf)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
# inspect the resulting data:
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertEqual(3, mon2.TestnestedflatbufferLength())
self.assertEqual(0, mon2.Testnestedflatbuffer(0))
self.assertEqual(2, mon2.Testnestedflatbuffer(1))
self.assertEqual(4, mon2.Testnestedflatbuffer(2))
def test_nondefault_monster_testempty(self):
b = flatbuffers.Builder(0)
# make a Stat object:
MyGame.Example.Stat.StatStart(b)
MyGame.Example.Stat.StatAddVal(b, 123)
my_stat = MyGame.Example.Stat.StatEnd(b)
b.Finish(my_stat)
# include the stat object in a monster:
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddTestempty(b, my_stat)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
# inspect the resulting data:
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertEqual(123, mon2.Testempty().Val())
def test_default_monster_testbool(self):
self.assertFalse(self.mon.Testbool())
def test_nondefault_monster_testbool(self):
b = flatbuffers.Builder(0)
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddTestbool(b, True)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
# inspect the resulting data:
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertTrue(mon2.Testbool())
def test_default_monster_testhashes(self):
self.assertEqual(0, self.mon.Testhashs32Fnv1())
self.assertEqual(0, self.mon.Testhashu32Fnv1())
self.assertEqual(0, self.mon.Testhashs64Fnv1())
self.assertEqual(0, self.mon.Testhashu64Fnv1())
self.assertEqual(0, self.mon.Testhashs32Fnv1a())
self.assertEqual(0, self.mon.Testhashu32Fnv1a())
self.assertEqual(0, self.mon.Testhashs64Fnv1a())
self.assertEqual(0, self.mon.Testhashu64Fnv1a())
def test_nondefault_monster_testhashes(self):
b = flatbuffers.Builder(0)
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddTesthashs32Fnv1(b, 1)
MyGame.Example.Monster.MonsterAddTesthashu32Fnv1(b, 2)
MyGame.Example.Monster.MonsterAddTesthashs64Fnv1(b, 3)
MyGame.Example.Monster.MonsterAddTesthashu64Fnv1(b, 4)
MyGame.Example.Monster.MonsterAddTesthashs32Fnv1a(b, 5)
MyGame.Example.Monster.MonsterAddTesthashu32Fnv1a(b, 6)
MyGame.Example.Monster.MonsterAddTesthashs64Fnv1a(b, 7)
MyGame.Example.Monster.MonsterAddTesthashu64Fnv1a(b, 8)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
# inspect the resulting data:
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertEqual(1, mon2.Testhashs32Fnv1())
self.assertEqual(2, mon2.Testhashu32Fnv1())
self.assertEqual(3, mon2.Testhashs64Fnv1())
self.assertEqual(4, mon2.Testhashu64Fnv1())
self.assertEqual(5, mon2.Testhashs32Fnv1a())
self.assertEqual(6, mon2.Testhashu32Fnv1a())
self.assertEqual(7, mon2.Testhashs64Fnv1a())
self.assertEqual(8, mon2.Testhashu64Fnv1a())
def test_getrootas_for_nonroot_table(self):
b = flatbuffers.Builder(0)
string = b.CreateString("MyStat")
MyGame.Example.Stat.StatStart(b)
MyGame.Example.Stat.StatAddId(b, string)
MyGame.Example.Stat.StatAddVal(b, 12345678)
MyGame.Example.Stat.StatAddCount(b, 12345)
stat = MyGame.Example.Stat.StatEnd(b)
b.Finish(stat)
stat2 = MyGame.Example.Stat.Stat.GetRootAsStat(b.Bytes, b.Head())
self.assertEqual(b"MyStat", stat2.Id())
self.assertEqual(12345678, stat2.Val())
self.assertEqual(12345, stat2.Count())
class TestVtableDeduplication(unittest.TestCase):
''' TestVtableDeduplication verifies that vtables are deduplicated. '''
def test_vtable_deduplication(self):
b = flatbuffers.Builder(0)
b.StartObject(4)
b.PrependByteSlot(0, 0, 0)
b.PrependByteSlot(1, 11, 0)
b.PrependByteSlot(2, 22, 0)
b.PrependInt16Slot(3, 33, 0)
obj0 = b.EndObject()
b.StartObject(4)
b.PrependByteSlot(0, 0, 0)
b.PrependByteSlot(1, 44, 0)
b.PrependByteSlot(2, 55, 0)
b.PrependInt16Slot(3, 66, 0)
obj1 = b.EndObject()
b.StartObject(4)
b.PrependByteSlot(0, 0, 0)
b.PrependByteSlot(1, 77, 0)
b.PrependByteSlot(2, 88, 0)
b.PrependInt16Slot(3, 99, 0)
obj2 = b.EndObject()
got = b.Bytes[b.Head():]
want = bytearray([
            240, 255, 255, 255, # == -12. offset to deduplicated vtable.
99, 0,
88,
77,
            248, 255, 255, 255, # == -8. offset to deduplicated vtable.
66, 0,
55,
44,
12, 0,
8, 0,
0, 0,
7, 0,
6, 0,
4, 0,
12, 0, 0, 0,
33, 0,
22,
11,
])
self.assertEqual((len(want), want), (len(got), got))
table0 = flatbuffers.table.Table(b.Bytes, len(b.Bytes) - obj0)
table1 = flatbuffers.table.Table(b.Bytes, len(b.Bytes) - obj1)
table2 = flatbuffers.table.Table(b.Bytes, len(b.Bytes) - obj2)
def _checkTable(tab, voffsett_value, b, c, d):
# vtable size
got = tab.GetVOffsetTSlot(0, 0)
self.assertEqual(12, got, 'case 0, 0')
# object size
got = tab.GetVOffsetTSlot(2, 0)
self.assertEqual(8, got, 'case 2, 0')
# default value
got = tab.GetVOffsetTSlot(4, 0)
self.assertEqual(voffsett_value, got, 'case 4, 0')
got = tab.GetSlot(6, 0, N.Uint8Flags)
self.assertEqual(b, got, 'case 6, 0')
val = tab.GetSlot(8, 0, N.Uint8Flags)
self.assertEqual(c, val, 'failed 8, 0')
got = tab.GetSlot(10, 0, N.Uint8Flags)
self.assertEqual(d, got, 'failed 10, 0')
_checkTable(table0, 0, 11, 22, 33)
_checkTable(table1, 0, 44, 55, 66)
_checkTable(table2, 0, 77, 88, 99)
class TestExceptions(unittest.TestCase):
def test_object_is_nested_error(self):
b = flatbuffers.Builder(0)
b.StartObject(0)
assertRaises(self, lambda: b.StartObject(0),
flatbuffers.builder.IsNestedError)
def test_object_is_not_nested_error(self):
b = flatbuffers.Builder(0)
assertRaises(self, lambda: b.EndObject(),
flatbuffers.builder.IsNotNestedError)
def test_struct_is_not_inline_error(self):
b = flatbuffers.Builder(0)
b.StartObject(0)
assertRaises(self, lambda: b.PrependStructSlot(0, 1, 0),
flatbuffers.builder.StructIsNotInlineError)
def test_unreachable_error(self):
b = flatbuffers.Builder(0)
assertRaises(self, lambda: b.PrependUOffsetTRelative(1),
flatbuffers.builder.OffsetArithmeticError)
def test_create_string_is_nested_error(self):
b = flatbuffers.Builder(0)
b.StartObject(0)
s = 'test1'
assertRaises(self, lambda: b.CreateString(s),
flatbuffers.builder.IsNestedError)
def test_finished_bytes_error(self):
b = flatbuffers.Builder(0)
assertRaises(self, lambda: b.Output(),
flatbuffers.builder.BuilderNotFinishedError)
def CheckAgainstGoldDataGo():
try:
gen_buf, gen_off = make_monster_from_generated_code()
fn = 'monsterdata_go_wire.mon'
if not os.path.exists(fn):
print('Go-generated data does not exist, failed.')
return False
# would like to use a context manager here, but it's less
# backwards-compatible:
f = open(fn, 'rb')
go_wire_data = f.read()
f.close()
CheckReadBuffer(bytearray(go_wire_data), 0)
        if bytearray(gen_buf[gen_off:]) != bytearray(go_wire_data):
raise AssertionError('CheckAgainstGoldDataGo failed')
except:
print('Failed to test against Go-generated test data.')
return False
print('Can read Go-generated test data, and Python generates bytewise identical data.')
return True
def CheckAgainstGoldDataJava():
try:
gen_buf, gen_off = make_monster_from_generated_code()
fn = 'monsterdata_java_wire.mon'
if not os.path.exists(fn):
print('Java-generated data does not exist, failed.')
return False
f = open(fn, 'rb')
java_wire_data = f.read()
f.close()
CheckReadBuffer(bytearray(java_wire_data), 0)
except:
print('Failed to read Java-generated test data.')
return False
print('Can read Java-generated test data.')
return True
class LCG(object):
    ''' A simple linear congruential random number generator, included so
    that fuzzing results are identical across platforms.
    http://en.wikipedia.org/wiki/Park%E2%80%93Miller_random_number_generator '''
__slots__ = ['n']
InitialLCGSeed = 48271
def __init__(self):
self.n = self.InitialLCGSeed
def Reset(self):
self.n = self.InitialLCGSeed
def Next(self):
self.n = ((self.n * 279470273) % 4294967291) & 0xFFFFFFFF
return self.n
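def _example_lcg_determinism():
    ''' Hedged sketch (not called by the tests): two LCG instances produce
    identical sequences, which is what the fuzzer relies on when it replays
    the same random choices while verifying the objects it generated. '''
    a, b = LCG(), LCG()
    assert [a.Next() for _ in range(5)] == [b.Next() for _ in range(5)]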
def BenchmarkVtableDeduplication(count):
'''
BenchmarkVtableDeduplication measures the speed of vtable deduplication
by creating `prePop` vtables, then populating `count` objects with a
different single vtable.
When count is large (as in long benchmarks), memory usage may be high.
'''
prePop = 10
builder = flatbuffers.Builder(0)
# pre-populate some vtables:
for i in compat_range(prePop):
builder.StartObject(i)
for j in compat_range(i):
builder.PrependInt16Slot(j, j, 0)
builder.EndObject()
# benchmark deduplication of a new vtable:
def f():
builder.StartObject(prePop)
for j in compat_range(prePop):
builder.PrependInt16Slot(j, j, 0)
builder.EndObject()
duration = timeit.timeit(stmt=f, number=count)
rate = float(count) / duration
print(('vtable deduplication rate: %.2f/sec' % rate))
def BenchmarkCheckReadBuffer(count, buf, off):
'''
BenchmarkCheckReadBuffer measures the speed of flatbuffer reading
by re-using the CheckReadBuffer function with the gold data.
'''
def f():
CheckReadBuffer(buf, off)
duration = timeit.timeit(stmt=f, number=count)
rate = float(count) / duration
data = float(len(buf) * count) / float(1024 * 1024)
data_rate = data / float(duration)
print(('traversed %d %d-byte flatbuffers in %.2fsec: %.2f/sec, %.2fMB/sec')
% (count, len(buf), duration, rate, data_rate))
def BenchmarkMakeMonsterFromGeneratedCode(count, length):
'''
BenchmarkMakeMonsterFromGeneratedCode measures the speed of flatbuffer
creation by re-using the make_monster_from_generated_code function for
generating gold data examples.
'''
duration = timeit.timeit(stmt=make_monster_from_generated_code,
number=count)
rate = float(count) / duration
data = float(length * count) / float(1024 * 1024)
data_rate = data / float(duration)
print(('built %d %d-byte flatbuffers in %.2fsec: %.2f/sec, %.2fMB/sec' % \
(count, length, duration, rate, data_rate)))
def backward_compatible_run_tests(**kwargs):
if PY_VERSION < (2, 6):
sys.stderr.write("Python version less than 2.6 are not supported")
sys.stderr.flush()
return False
# python2.6 has a reduced-functionality unittest.main function:
if PY_VERSION == (2, 6):
try:
unittest.main(**kwargs)
except SystemExit as e:
if not e.code == 0:
return False
return True
# python2.7 and above let us not exit once unittest.main is run:
kwargs['exit'] = False
kwargs['verbosity'] = 0
ret = unittest.main(**kwargs)
if ret.result.errors or ret.result.failures:
return False
return True
def main():
import os
import sys
    if len(sys.argv) != 4:
        sys.stderr.write('Usage: %s <benchmark vtable count> '
                         '<benchmark read count> <benchmark build count>\n'
                         % sys.argv[0])
        sys.stderr.write('    Provide COMPARE_GENERATED_TO_GO=1 to check '
                         'for bytewise comparison to Go data.\n')
        sys.stderr.write('    Provide COMPARE_GENERATED_TO_JAVA=1 to check '
                         'for bytewise comparison to Java data.\n')
sys.stderr.flush()
sys.exit(1)
kwargs = dict(argv=sys.argv[:-3])
# run tests, and run some language comparison checks if needed:
success = backward_compatible_run_tests(**kwargs)
if success and os.environ.get('COMPARE_GENERATED_TO_GO', 0) == "1":
success = success and CheckAgainstGoldDataGo()
if success and os.environ.get('COMPARE_GENERATED_TO_JAVA', 0) == "1":
success = success and CheckAgainstGoldDataJava()
if not success:
sys.stderr.write('Tests failed, skipping benchmarks.\n')
sys.stderr.flush()
sys.exit(1)
# run benchmarks (if 0, they will be a noop):
bench_vtable = int(sys.argv[1])
bench_traverse = int(sys.argv[2])
bench_build = int(sys.argv[3])
if bench_vtable:
BenchmarkVtableDeduplication(bench_vtable)
if bench_traverse:
buf, off = make_monster_from_generated_code()
BenchmarkCheckReadBuffer(bench_traverse, buf, off)
if bench_build:
buf, off = make_monster_from_generated_code()
BenchmarkMakeMonsterFromGeneratedCode(bench_build, len(buf))
if __name__ == '__main__':
main()
| apache-2.0 |
olivierkes/manuskript | manuskript/ui/views/outlineDelegates.py | 2 | 13273 | #!/usr/bin/env python
# --!-- coding: utf8 --!--
from PyQt5.QtCore import Qt, QSize, QModelIndex
from PyQt5.QtGui import QColor, QPalette, QIcon, QFont, QFontMetrics, QBrush
from PyQt5.QtWidgets import QStyledItemDelegate, QStyleOptionViewItem, QStyle, QComboBox, QStyleOptionComboBox
from PyQt5.QtWidgets import qApp
from manuskript import settings
from manuskript.enums import Character, Outline
from manuskript.functions import outlineItemColors, mixColors, colorifyPixmap, toInt, toFloat, drawProgress
from manuskript.ui import style as S
class outlineTitleDelegate(QStyledItemDelegate):
def __init__(self, parent=None):
QStyledItemDelegate.__init__(self, parent)
self._view = None
def setView(self, view):
self._view = view
def paint(self, painter, option, index):
item = index.internalPointer()
colors = outlineItemColors(item)
style = qApp.style()
opt = QStyleOptionViewItem(option)
self.initStyleOption(opt, index)
iconRect = style.subElementRect(style.SE_ItemViewItemDecoration, opt)
textRect = style.subElementRect(style.SE_ItemViewItemText, opt)
# Background
style.drawPrimitive(style.PE_PanelItemViewItem, opt, painter)
if settings.viewSettings["Outline"]["Background"] != "Nothing" and not opt.state & QStyle.State_Selected:
col = colors[settings.viewSettings["Outline"]["Background"]]
if col != QColor(Qt.transparent):
col2 = QColor(S.base)
if opt.state & QStyle.State_Selected:
col2 = opt.palette.brush(QPalette.Normal, QPalette.Highlight).color()
col = mixColors(col, col2, .2)
painter.save()
painter.setBrush(col)
painter.setPen(Qt.NoPen)
rect = opt.rect
if self._view:
r2 = self._view.visualRect(index)
rect = self._view.viewport().rect()
rect.setLeft(r2.left())
rect.setTop(r2.top())
rect.setBottom(r2.bottom())
painter.drawRoundedRect(rect, 5, 5)
painter.restore()
# Icon
mode = QIcon.Normal
if not opt.state & QStyle.State_Enabled:
mode = QIcon.Disabled
elif opt.state & QStyle.State_Selected:
mode = QIcon.Selected
state = QIcon.On if opt.state & QStyle.State_Open else QIcon.Off
icon = opt.icon.pixmap(iconRect.size(), mode=mode, state=state)
if opt.icon and settings.viewSettings["Outline"]["Icon"] != "Nothing":
color = colors[settings.viewSettings["Outline"]["Icon"]]
colorifyPixmap(icon, color)
opt.icon = QIcon(icon)
opt.icon.paint(painter, iconRect, opt.decorationAlignment, mode, state)
# Text
if opt.text:
painter.save()
textColor = QColor(S.text)
if option.state & QStyle.State_Selected:
col = QColor(S.highlightedText)
textColor = col
painter.setPen(col)
if settings.viewSettings["Outline"]["Text"] != "Nothing":
col = colors[settings.viewSettings["Outline"]["Text"]]
if col == Qt.transparent:
col = textColor
# If text color is Compile and item is selected, we have
# to change the color
if settings.viewSettings["Outline"]["Text"] == "Compile" and \
item.compile() in [0, "0"]:
col = mixColors(textColor, QColor(S.window))
painter.setPen(col)
f = QFont(opt.font)
painter.setFont(f)
fm = QFontMetrics(f)
elidedText = fm.elidedText(opt.text, Qt.ElideRight, textRect.width())
painter.drawText(textRect, Qt.AlignLeft, elidedText)
painter.restore()
# QStyledItemDelegate.paint(self, painter, option, index)
class outlineCharacterDelegate(QStyledItemDelegate):
def __init__(self, mdlCharacter, parent=None):
QStyledItemDelegate.__init__(self, parent)
self.mdlCharacter = mdlCharacter
def sizeHint(self, option, index):
# s = QStyledItemDelegate.sizeHint(self, option, index)
item = QModelIndex()
character = self.mdlCharacter.getCharacterByID(index.data())
if character:
item = character.index(Character.name)
opt = QStyleOptionViewItem(option)
self.initStyleOption(opt, item)
s = QStyledItemDelegate.sizeHint(self, opt, item)
if s.width() > 200:
s.setWidth(200)
elif s.width() < 100:
s.setWidth(100)
return s + QSize(18, 0)
def createEditor(self, parent, option, index):
item = index.internalPointer()
# if item.isFolder(): # No POV for folders
# return
editor = QComboBox(parent)
editor.setAutoFillBackground(True)
editor.setFrame(False)
return editor
def setEditorData(self, editor, index):
# editor.addItem("")
editor.addItem(QIcon.fromTheme("dialog-no"), self.tr("None"))
l = [self.tr("Main"), self.tr("Secondary"), self.tr("Minor")]
for importance in range(3):
editor.addItem(l[importance])
editor.setItemData(editor.count() - 1, QBrush(QColor(S.highlightedTextDark)), Qt.ForegroundRole)
editor.setItemData(editor.count() - 1, QBrush(QColor(S.highlightLight)), Qt.BackgroundRole)
item = editor.model().item(editor.count() - 1)
item.setFlags(Qt.ItemIsEnabled)
for i in range(self.mdlCharacter.rowCount()):
imp = toInt(self.mdlCharacter.importance(i))
                if 2 - imp != importance: continue
# try:
editor.addItem(self.mdlCharacter.icon(i), self.mdlCharacter.name(i), self.mdlCharacter.ID(i))
editor.setItemData(editor.count() - 1, self.mdlCharacter.name(i), Qt.ToolTipRole)
# except:
# pass
editor.setCurrentIndex(editor.findData(index.data()))
editor.showPopup()
def setModelData(self, editor, model, index):
val = editor.currentData()
model.setData(index, val)
def paint(self, painter, option, index):
##option.rect.setWidth(option.rect.width() - 18)
# QStyledItemDelegate.paint(self, painter, option, index)
##option.rect.setWidth(option.rect.width() + 18)
itemIndex = QModelIndex()
character = self.mdlCharacter.getCharacterByID(index.data())
if character:
itemIndex = character.index(Character.name)
opt = QStyleOptionViewItem(option)
self.initStyleOption(opt, itemIndex)
qApp.style().drawControl(QStyle.CE_ItemViewItem, opt, painter)
# if index.isValid() and index.internalPointer().data(Outline.POV) not in ["", None]:
if itemIndex.isValid() and self.mdlCharacter.data(itemIndex) not in ["", None]:
opt = QStyleOptionComboBox()
opt.rect = option.rect
r = qApp.style().subControlRect(QStyle.CC_ComboBox, opt, QStyle.SC_ComboBoxArrow)
option.rect = r
qApp.style().drawPrimitive(QStyle.PE_IndicatorArrowDown, option, painter)
class outlineCompileDelegate(QStyledItemDelegate):
def __init__(self, parent=None):
QStyledItemDelegate.__init__(self, parent)
def displayText(self, value, locale):
return ""
#def createEditor(self, parent, option, index):
#return None
class outlineGoalPercentageDelegate(QStyledItemDelegate):
def __init__(self, rootIndex=None, parent=None):
QStyledItemDelegate.__init__(self, parent)
self.rootIndex = rootIndex
def sizeHint(self, option, index):
sh = QStyledItemDelegate.sizeHint(self, option, index)
# if sh.width() > 50:
sh.setWidth(100)
return sh
def paint(self, painter, option, index):
if not index.isValid():
return QStyledItemDelegate.paint(self, painter, option, index)
QStyledItemDelegate.paint(self, painter, option, index)
item = index.internalPointer()
if not item.data(Outline.goal):
return
p = toFloat(item.data(Outline.goalPercentage))
typ = item.data(Outline.type)
level = item.level()
if self.rootIndex and self.rootIndex.isValid():
level -= self.rootIndex.internalPointer().level() + 1
margin = 5
height = max(min(option.rect.height() - 2 * margin, 12) - 2 * level, 6)
painter.save()
rect = option.rect.adjusted(margin, margin, -margin, -margin)
# Move
rect.translate(level * rect.width() / 10, 0)
rect.setWidth(rect.width() - level * rect.width() / 10)
rect.setHeight(height)
rect.setTop(option.rect.top() + (option.rect.height() - height) / 2)
drawProgress(painter, rect, p) # from functions
painter.restore()
def displayText(self, value, locale):
return ""
class outlineStatusDelegate(QStyledItemDelegate):
def __init__(self, mdlStatus, parent=None):
QStyledItemDelegate.__init__(self, parent)
self.mdlStatus = mdlStatus
def sizeHint(self, option, index):
s = QStyledItemDelegate.sizeHint(self, option, index)
if s.width() > 150:
s.setWidth(150)
elif s.width() < 50:
s.setWidth(50)
return s + QSize(18, 0)
def createEditor(self, parent, option, index):
editor = QComboBox(parent)
editor.setAutoFillBackground(True)
editor.setFrame(False)
return editor
def setEditorData(self, editor, index):
for i in range(self.mdlStatus.rowCount()):
editor.addItem(self.mdlStatus.item(i, 0).text())
val = index.internalPointer().data(Outline.status)
if not val: val = 0
editor.setCurrentIndex(int(val))
editor.showPopup()
def setModelData(self, editor, model, index):
val = editor.currentIndex()
model.setData(index, val)
def displayText(self, value, locale):
try:
return self.mdlStatus.item(int(value), 0).text()
except:
return ""
def paint(self, painter, option, index):
QStyledItemDelegate.paint(self, painter, option, index)
if index.isValid() and index.internalPointer().data(Outline.status) not in ["", None, "0", 0]:
opt = QStyleOptionComboBox()
opt.rect = option.rect
r = qApp.style().subControlRect(QStyle.CC_ComboBox, opt, QStyle.SC_ComboBoxArrow)
option.rect = r
qApp.style().drawPrimitive(QStyle.PE_IndicatorArrowDown, option, painter)
class outlineLabelDelegate(QStyledItemDelegate):
def __init__(self, mdlLabels, parent=None):
QStyledItemDelegate.__init__(self, parent)
self.mdlLabels = mdlLabels
def sizeHint(self, option, index):
d = index.internalPointer().data(index.column(), Qt.DisplayRole)
if not d:
d = 0
item = self.mdlLabels.item(int(d), 0)
idx = self.mdlLabels.indexFromItem(item)
opt = QStyleOptionViewItem(option)
self.initStyleOption(opt, idx)
s = qApp.style().sizeFromContents(QStyle.CT_ItemViewItem, opt, QSize(), None)
if s.width() > 150:
s.setWidth(150)
elif s.width() < 50:
s.setWidth(50)
return s + QSize(18, 0)
def createEditor(self, parent, option, index):
item = index.internalPointer()
editor = QComboBox(parent)
# editor.setAutoFillBackground(True)
editor.setFrame(False)
return editor
def setEditorData(self, editor, index):
for i in range(self.mdlLabels.rowCount()):
editor.addItem(self.mdlLabels.item(i, 0).icon(),
self.mdlLabels.item(i, 0).text())
val = index.internalPointer().data(Outline.label)
if not val: val = 0
editor.setCurrentIndex(int(val))
editor.showPopup()
def setModelData(self, editor, model, index):
val = editor.currentIndex()
model.setData(index, val)
def paint(self, painter, option, index):
if not index.isValid():
return QStyledItemDelegate.paint(self, painter, option, index)
else:
item = index.internalPointer()
d = item.data(index.column(), Qt.DisplayRole)
if not d:
d = 0
lbl = self.mdlLabels.item(int(d), 0)
opt = QStyleOptionViewItem(option)
self.initStyleOption(opt, self.mdlLabels.indexFromItem(lbl))
qApp.style().drawControl(QStyle.CE_ItemViewItem, opt, painter)
# Drop down indicator
if index.isValid() and index.internalPointer().data(Outline.label) not in ["", None, "0", 0]:
opt = QStyleOptionComboBox()
opt.rect = option.rect
r = qApp.style().subControlRect(QStyle.CC_ComboBox, opt, QStyle.SC_ComboBoxArrow)
option.rect = r
qApp.style().drawPrimitive(QStyle.PE_IndicatorArrowDown, option, painter)
| gpl-3.0 |
genome21/dcos-cli | dcos/cmds.py | 5 | 1376 | import collections
from dcos.errors import DCOSException
Command = collections.namedtuple(
'Command',
['hierarchy', 'arg_keys', 'function'])
"""Describe a CLI command.
:param hierarchy: the noun and verbs that need to be set for the command to
execute
:type hierarchy: list of str
:param arg_keys: the arguments that must get passed to the function; the order
of the keys determines the order in which they get passed to
the function
:type arg_keys: list of str
:param function: the function to execute
:type function: func(args) -> int
"""
def execute(cmds, args):
"""Executes one of the commands based on the arguments passed.
:param cmds: commands to try to execute; the order determines the order of
evaluation
:type cmds: list of Command
:param args: command line arguments
:type args: dict
:returns: the process status
:rtype: int
"""
for hierarchy, arg_keys, function in cmds:
# Let's find if the function matches the command
match = True
for positional in hierarchy:
if not args[positional]:
match = False
if match:
params = [args[name] for name in arg_keys]
return function(*params)
raise DCOSException('Could not find a command with the passed arguments')
| apache-2.0 |
feroda/JAGOM | apps/basic_groups/tests/__init__.py | 3 | 2761 | from django.core.urlresolvers import reverse
from django.test import TestCase
from basic_groups.models import BasicGroup
class BasicGroupsTest(TestCase):
fixtures = ["basic_groups_auth.json"]
urls = "basic_groups.tests.basic_groups_urls"
def test_unauth_create_get(self):
"""
can an unauth'd user get to page?
"""
response = self.client.get(reverse("group_create"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response["location"], "http://testserver/account/login/?next=/groups/create/")
def test_auth_create_get(self):
"""
can an auth'd user get to page?
"""
logged_in = self.client.login(username="tester", password="tester")
self.assertTrue(logged_in)
response = self.client.get(reverse("group_create"))
self.assertEqual(response.status_code, 200)
def test_unauth_create_post(self):
"""
can an unauth'd user post to create a new group?
"""
response = self.client.post(reverse("group_create"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response["location"], "http://testserver/account/login/?next=/groups/create/")
def test_auth_create_post(self):
"""
can an auth'd user post to create a new group?
"""
logged_in = self.client.login(username="tester", password="tester")
self.assertTrue(logged_in)
response = self.client.post(reverse("group_create"), {
"slug": "test",
"name": "Test Group",
"description": "A test group.",
})
self.assertEqual(response.status_code, 302)
self.assertEqual(response["location"], "http://testserver/groups/group/test/")
self.assertEqual(BasicGroup.objects.get(slug="test").creator.username, "tester")
self.assertEqual(BasicGroup.objects.get(slug="test").members.all()[0].username, "tester")
def test_auth_creator_membership(self):
"""
is membership for creator correct?
"""
logged_in = self.client.login(username="tester", password="tester")
self.assertTrue(logged_in)
response = self.client.post(reverse("group_create"), {
"slug": "test",
"name": "Test Group",
"description": "A test group.",
})
response = self.client.get(reverse("group_detail", args=["test"]))
self.assertEqual(BasicGroup.objects.get(slug="test").creator.username, "tester")
self.assertEqual(BasicGroup.objects.get(slug="test").members.all()[0].username, "tester")
self.assertEqual(response.context[0]["is_member"], True)
| agpl-3.0 |
ENCODE-DCC/encoded | src/encoded/tests/test_types_analysis.py | 1 | 1533 | import pytest
def test_types_analysis_title(
testapp,
analysis_released,
encode4_award,
ENCODE3_award,
encode_lab,
file_bam_1_1,
file_bam_2_1,
analysis_step_run_chip_encode4,
analysis_step_run_dnase_encode4,
pipeline_dnase_encode4,
pipeline_chip_encode4,
):
testapp.patch_json(analysis_released['@id'], {'files': [file_bam_1_1['@id']]})
res = testapp.get(analysis_released['@id'] + '@@index-data')
assert res.json['object']['title'] == 'Lab custom mm10'
testapp.patch_json(
file_bam_1_1['@id'],
{'step_run': analysis_step_run_chip_encode4['@id']}
)
testapp.patch_json(
pipeline_chip_encode4['@id'], {'lab': encode_lab['@id'], 'award': encode4_award['@id']})
res = testapp.get(analysis_released['@id'] + '@@index-data')
assert res.json['object']['title'] == 'ENCODE4 mm10'
testapp.patch_json(analysis_released['@id'], {'files': [file_bam_1_1['@id'], file_bam_2_1['@id']]})
testapp.patch_json(
file_bam_1_1['@id'],
{'step_run': analysis_step_run_chip_encode4['@id']}
)
testapp.patch_json(
file_bam_2_1['@id'],
{'step_run': analysis_step_run_dnase_encode4['@id']}
)
testapp.patch_json(
pipeline_dnase_encode4['@id'], {'lab': encode_lab['@id'], 'award': ENCODE3_award['@id']}
)
res = testapp.get(analysis_released['@id'] + '@@index-data')
    print(res.json['object'])
assert res.json['object']['title'] == 'Mixed uniform (ENCODE3, ENCODE4) mm10'
| mit |
wkrzemien/DIRAC | Resources/MessageQueue/test/Test_MQConsumer.py | 5 | 3828 | """Unit tests of MQConsumer interface in the DIRAC.Resources.MessageQueue.MQConsumer
"""
import unittest
from DIRAC import S_OK, S_ERROR
from DIRAC.Resources.MessageQueue.MQConsumer import MQConsumer
from DIRAC.Resources.MessageQueue.MQConnectionManager import MQConnectionManager
from DIRAC.Resources.MessageQueue.MQConnector import MQConnector
class FakeMQConnector( MQConnector ):
def __init__( self, params={} ):
super( FakeMQConnector, self ).__init__()
def disconnect(self):
return S_OK("FakeMQConnection disconnecting")
def get(self, destination = ''):
return "FakeMQConnection getting message"
def subscribe( self, parameters = None):
return S_OK( 'Subscription successful' )
def unsubscribe(self, parameters):
return S_OK( 'Unsubscription successful' )
class TestMQConsumer( unittest.TestCase ):
def setUp( self ):
self.maxDiff = None # To show full difference between structures in case of error
dest = {}
dest.update({'/queue/FakeQueue': ['consumer4', 'consumer2']})
dest4 = {'/queue/test3': ['consumer1', 'consumer2','consumer3','consumer4']}
conn1 = {'MQConnector':FakeMQConnector(), 'destinations':dest}
conn2 = {'MQConnector':FakeMQConnector(), 'destinations':dest4}
storage = {'fake.cern.ch':conn1, 'testdir.blabla.ch':conn2}
self.myManager = MQConnectionManager(connectionStorage = storage)
def tearDown( self ):
pass
class TestMQConsumer_get( TestMQConsumer):
def test_failure( self ):
consumer = MQConsumer(mqManager = self.myManager, mqURI = "fake.cern.ch::Queue::FakeQueue", consumerId = 'consumer1')
result = consumer.get()
self.assertFalse(result['OK'])
self.assertEqual(result['Message'], 'No messages ( 1141 : No messages in queue)')
  def test_success( self ):
consumer = MQConsumer(mqManager = self.myManager, mqURI = "bad.cern.ch::Queue::FakeQueue", consumerId = 'consumer1')
result = consumer.get()
self.assertFalse(result['OK'])
class TestMQConsumer_close( TestMQConsumer):
def test_success( self ):
consumer = MQConsumer(mqManager = self.myManager, mqURI = "fake.cern.ch::Queue::FakeQueue", consumerId ='consumer4')
result = consumer.close()
self.assertTrue(result['OK'])
def test_failure( self ):
consumer = MQConsumer(mqManager = self.myManager, mqURI = "fake.cern.ch::Queue::FakeQueue", consumerId ='consumer4')
result = consumer.close()
self.assertTrue(result['OK'])
result = consumer.close()
self.assertFalse(result['OK'])
self.assertEqual(result['Message'], 'MQ connection failure ( 1142 : Failed to stop the connection!The messenger consumer4 does not exist!)')
def test_failure2( self ):
consumer = MQConsumer(mqManager = self.myManager, mqURI = "fake.cern.ch::Queue::FakeQueue", consumerId ='consumer4')
consumer2 = MQConsumer(mqManager = self.myManager, mqURI = "fake.cern.ch::Queue::FakeQueue", consumerId ='consumer2')
result = consumer.close()
self.assertTrue(result['OK'])
result = consumer.close()
self.assertFalse(result['OK'])
self.assertEqual(result['Message'], 'MQ connection failure ( 1142 : Failed to stop the connection!The messenger consumer4 does not exist!)')
result = consumer2.close()
self.assertTrue(result['OK'])
result = consumer2.close()
self.assertFalse(result['OK'])
self.assertEqual(result['Message'], 'MQ connection failure ( 1142 : Failed to stop the connection!The messenger consumer2 does not exist!)')
if __name__ == '__main__':
suite = unittest.defaultTestLoader.loadTestsFromTestCase( TestMQConsumer )
suite.addTest( unittest.defaultTestLoader.loadTestsFromTestCase( TestMQConsumer_get))
suite.addTest( unittest.defaultTestLoader.loadTestsFromTestCase( TestMQConsumer_close))
testResult = unittest.TextTestRunner( verbosity = 2 ).run( suite )
| gpl-3.0 |
zackslash/scrapy | scrapy/extensions/httpcache.py | 102 | 16232 | from __future__ import print_function
import os
import gzip
from six.moves import cPickle as pickle
from importlib import import_module
from time import time
from weakref import WeakKeyDictionary
from email.utils import mktime_tz, parsedate_tz
from w3lib.http import headers_raw_to_dict, headers_dict_to_raw
from scrapy.http import Headers, Response
from scrapy.responsetypes import responsetypes
from scrapy.utils.request import request_fingerprint
from scrapy.utils.project import data_path
from scrapy.utils.httpobj import urlparse_cached
class DummyPolicy(object):
def __init__(self, settings):
self.ignore_schemes = settings.getlist('HTTPCACHE_IGNORE_SCHEMES')
self.ignore_http_codes = [int(x) for x in settings.getlist('HTTPCACHE_IGNORE_HTTP_CODES')]
def should_cache_request(self, request):
return urlparse_cached(request).scheme not in self.ignore_schemes
def should_cache_response(self, response, request):
return response.status not in self.ignore_http_codes
def is_cached_response_fresh(self, response, request):
return True
def is_cached_response_valid(self, cachedresponse, response, request):
return True
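# Hedged sketch (illustrative only; the real wiring lives in Scrapy's
# HttpCacheMiddleware): a policy object is consulted roughly like this:
#
#   if policy.should_cache_request(request):
#       cached = storage.retrieve_response(spider, request)
#       if cached is not None and policy.is_cached_response_fresh(cached, request):
#           return cached  # serve from cache, skipping the download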
class RFC2616Policy(object):
MAXAGE = 3600 * 24 * 365 # one year
def __init__(self, settings):
self.always_store = settings.getbool('HTTPCACHE_ALWAYS_STORE')
self.ignore_schemes = settings.getlist('HTTPCACHE_IGNORE_SCHEMES')
self.ignore_response_cache_controls = settings.getlist('HTTPCACHE_IGNORE_RESPONSE_CACHE_CONTROLS')
self._cc_parsed = WeakKeyDictionary()
def _parse_cachecontrol(self, r):
if r not in self._cc_parsed:
cch = r.headers.get('Cache-Control', '')
parsed = parse_cachecontrol(cch)
if isinstance(r, Response):
for key in self.ignore_response_cache_controls:
parsed.pop(key, None)
self._cc_parsed[r] = parsed
return self._cc_parsed[r]
def should_cache_request(self, request):
if urlparse_cached(request).scheme in self.ignore_schemes:
return False
cc = self._parse_cachecontrol(request)
# obey user-agent directive "Cache-Control: no-store"
if 'no-store' in cc:
return False
# Any other is eligible for caching
return True
def should_cache_response(self, response, request):
# What is cacheable - http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec14.9.1
# Response cacheability - http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.4
# Status code 206 is not included because cache can not deal with partial contents
cc = self._parse_cachecontrol(response)
# obey directive "Cache-Control: no-store"
if 'no-store' in cc:
return False
# Never cache 304 (Not Modified) responses
elif response.status == 304:
return False
# Cache unconditionally if configured to do so
elif self.always_store:
return True
# Any hint on response expiration is good
elif 'max-age' in cc or 'Expires' in response.headers:
return True
# Firefox fallbacks this statuses to one year expiration if none is set
elif response.status in (300, 301, 308):
return True
# Other statuses without expiration requires at least one validator
elif response.status in (200, 203, 401):
return 'Last-Modified' in response.headers or 'ETag' in response.headers
        # Anything else is probably not eligible for caching: it makes no
        # sense to cache responses that contain no expiration info and
        # cannot be revalidated
else:
return False
def is_cached_response_fresh(self, cachedresponse, request):
cc = self._parse_cachecontrol(cachedresponse)
ccreq = self._parse_cachecontrol(request)
if 'no-cache' in cc or 'no-cache' in ccreq:
return False
now = time()
freshnesslifetime = self._compute_freshness_lifetime(cachedresponse, request, now)
currentage = self._compute_current_age(cachedresponse, request, now)
reqmaxage = self._get_max_age(ccreq)
if reqmaxage is not None:
freshnesslifetime = min(freshnesslifetime, reqmaxage)
if currentage < freshnesslifetime:
return True
if 'max-stale' in ccreq and 'must-revalidate' not in cc:
# From RFC2616: "Indicates that the client is willing to
# accept a response that has exceeded its expiration time.
# If max-stale is assigned a value, then the client is
# willing to accept a response that has exceeded its
# expiration time by no more than the specified number of
# seconds. If no value is assigned to max-stale, then the
# client is willing to accept a stale response of any age."
staleage = ccreq['max-stale']
if staleage is None:
return True
try:
if currentage < freshnesslifetime + max(0, int(staleage)):
return True
except ValueError:
pass
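        # Worked example (illustrative): with freshnesslifetime=60s,
        # currentage=90s and a request carrying "max-stale=40",
        # 90 < 60 + 40 holds, so the stale cached response is still served.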
# Cached response is stale, try to set validators if any
self._set_conditional_validators(request, cachedresponse)
return False
def is_cached_response_valid(self, cachedresponse, response, request):
# Use the cached response if the new response is a server error,
# as long as the old response didn't specify must-revalidate.
if response.status >= 500:
cc = self._parse_cachecontrol(cachedresponse)
if 'must-revalidate' not in cc:
return True
# Use the cached response if the server says it hasn't changed.
return response.status == 304
def _set_conditional_validators(self, request, cachedresponse):
if 'Last-Modified' in cachedresponse.headers:
request.headers['If-Modified-Since'] = cachedresponse.headers['Last-Modified']
if 'ETag' in cachedresponse.headers:
request.headers['If-None-Match'] = cachedresponse.headers['ETag']
def _get_max_age(self, cc):
try:
return max(0, int(cc['max-age']))
except (KeyError, ValueError):
return None
def _compute_freshness_lifetime(self, response, request, now):
# Reference nsHttpResponseHead::ComputeFreshnessLifetime
# http://dxr.mozilla.org/mozilla-central/source/netwerk/protocol/http/nsHttpResponseHead.cpp#410
cc = self._parse_cachecontrol(response)
maxage = self._get_max_age(cc)
if maxage is not None:
return maxage
# Parse date header or synthesize it if none exists
date = rfc1123_to_epoch(response.headers.get('Date')) or now
# Try HTTP/1.0 Expires header
if 'Expires' in response.headers:
expires = rfc1123_to_epoch(response.headers['Expires'])
# When parsing Expires header fails RFC 2616 section 14.21 says we
# should treat this as an expiration time in the past.
return max(0, expires - date) if expires else 0
# Fallback to heuristic using last-modified header
        # This is not in the RFC, but matches Firefox's caching implementation
lastmodified = rfc1123_to_epoch(response.headers.get('Last-Modified'))
if lastmodified and lastmodified <= date:
return (date - lastmodified) / 10
# This request can be cached indefinitely
if response.status in (300, 301, 308):
return self.MAXAGE
        # Insufficient information to compute the freshness lifetime
return 0
def _compute_current_age(self, response, request, now):
# Reference nsHttpResponseHead::ComputeCurrentAge
# http://dxr.mozilla.org/mozilla-central/source/netwerk/protocol/http/nsHttpResponseHead.cpp#366
currentage = 0
# If Date header is not set we assume it is a fast connection, and
# clock is in sync with the server
date = rfc1123_to_epoch(response.headers.get('Date')) or now
if now > date:
currentage = now - date
if 'Age' in response.headers:
try:
age = int(response.headers['Age'])
currentage = max(currentage, age)
except ValueError:
pass
return currentage
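# Example (hedged): for a header like 'Cache-Control: max-age=3600, no-store',
# parse_cachecontrol (a helper defined later in this module) yields roughly
# {'max-age': '3600', 'no-store': None}, which the directive checks above
# key on.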
class DbmCacheStorage(object):
def __init__(self, settings):
self.cachedir = data_path(settings['HTTPCACHE_DIR'], createdir=True)
self.expiration_secs = settings.getint('HTTPCACHE_EXPIRATION_SECS')
self.dbmodule = import_module(settings['HTTPCACHE_DBM_MODULE'])
self.db = None
def open_spider(self, spider):
dbpath = os.path.join(self.cachedir, '%s.db' % spider.name)
self.db = self.dbmodule.open(dbpath, 'c')
def close_spider(self, spider):
self.db.close()
def retrieve_response(self, spider, request):
data = self._read_data(spider, request)
if data is None:
return # not cached
url = data['url']
status = data['status']
headers = Headers(data['headers'])
body = data['body']
respcls = responsetypes.from_args(headers=headers, url=url)
response = respcls(url=url, headers=headers, status=status, body=body)
return response
def store_response(self, spider, request, response):
key = self._request_key(request)
data = {
'status': response.status,
'url': response.url,
'headers': dict(response.headers),
'body': response.body,
}
self.db['%s_data' % key] = pickle.dumps(data, protocol=2)
self.db['%s_time' % key] = str(time())
def _read_data(self, spider, request):
key = self._request_key(request)
db = self.db
tkey = '%s_time' % key
if tkey not in db:
return # not found
ts = db[tkey]
if 0 < self.expiration_secs < time() - float(ts):
return # expired
return pickle.loads(db['%s_data' % key])
def _request_key(self, request):
return request_fingerprint(request)
class FilesystemCacheStorage(object):
def __init__(self, settings):
self.cachedir = data_path(settings['HTTPCACHE_DIR'])
self.expiration_secs = settings.getint('HTTPCACHE_EXPIRATION_SECS')
self.use_gzip = settings.getbool('HTTPCACHE_GZIP')
self._open = gzip.open if self.use_gzip else open
def open_spider(self, spider):
pass
def close_spider(self, spider):
pass
def retrieve_response(self, spider, request):
"""Return response if present in cache, or None otherwise."""
metadata = self._read_meta(spider, request)
if metadata is None:
return # not cached
rpath = self._get_request_path(spider, request)
with self._open(os.path.join(rpath, 'response_body'), 'rb') as f:
body = f.read()
with self._open(os.path.join(rpath, 'response_headers'), 'rb') as f:
rawheaders = f.read()
url = metadata.get('response_url')
status = metadata['status']
headers = Headers(headers_raw_to_dict(rawheaders))
respcls = responsetypes.from_args(headers=headers, url=url)
response = respcls(url=url, headers=headers, status=status, body=body)
return response
def store_response(self, spider, request, response):
"""Store the given response in the cache."""
rpath = self._get_request_path(spider, request)
if not os.path.exists(rpath):
os.makedirs(rpath)
metadata = {
'url': request.url,
'method': request.method,
'status': response.status,
'response_url': response.url,
'timestamp': time(),
}
with self._open(os.path.join(rpath, 'meta'), 'wb') as f:
f.write(repr(metadata))
with self._open(os.path.join(rpath, 'pickled_meta'), 'wb') as f:
pickle.dump(metadata, f, protocol=2)
with self._open(os.path.join(rpath, 'response_headers'), 'wb') as f:
f.write(headers_dict_to_raw(response.headers))
with self._open(os.path.join(rpath, 'response_body'), 'wb') as f:
f.write(response.body)
with self._open(os.path.join(rpath, 'request_headers'), 'wb') as f:
f.write(headers_dict_to_raw(request.headers))
with self._open(os.path.join(rpath, 'request_body'), 'wb') as f:
f.write(request.body)
def _get_request_path(self, spider, request):
key = request_fingerprint(request)
return os.path.join(self.cachedir, spider.name, key[0:2], key)
def _read_meta(self, spider, request):
rpath = self._get_request_path(spider, request)
metapath = os.path.join(rpath, 'pickled_meta')
if not os.path.exists(metapath):
return # not found
mtime = os.stat(rpath).st_mtime
if 0 < self.expiration_secs < time() - mtime:
return # expired
with self._open(metapath, 'rb') as f:
return pickle.load(f)
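# On-disk layout sketch for FilesystemCacheStorage above (an illustration
# inferred from the code, not original documentation; <key> is the request
# fingerprint):
#   <HTTPCACHE_DIR>/<spider.name>/<key[0:2]>/<key>/
#       meta, pickled_meta, response_headers, response_body,
#       request_headers, request_body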
class LeveldbCacheStorage(object):
def __init__(self, settings):
import leveldb
self._leveldb = leveldb
self.cachedir = data_path(settings['HTTPCACHE_DIR'], createdir=True)
self.expiration_secs = settings.getint('HTTPCACHE_EXPIRATION_SECS')
self.db = None
def open_spider(self, spider):
dbpath = os.path.join(self.cachedir, '%s.leveldb' % spider.name)
self.db = self._leveldb.LevelDB(dbpath)
def close_spider(self, spider):
# Compact the database on close to save space, and also recreate the files
# to avoid them being removed by storages with timestamp-based auto-removal.
self.db.CompactRange()
del self.db
def retrieve_response(self, spider, request):
data = self._read_data(spider, request)
if data is None:
return # not cached
url = data['url']
status = data['status']
headers = Headers(data['headers'])
body = data['body']
respcls = responsetypes.from_args(headers=headers, url=url)
response = respcls(url=url, headers=headers, status=status, body=body)
return response
def store_response(self, spider, request, response):
key = self._request_key(request)
data = {
'status': response.status,
'url': response.url,
'headers': dict(response.headers),
'body': response.body,
}
batch = self._leveldb.WriteBatch()
batch.Put('%s_data' % key, pickle.dumps(data, protocol=2))
batch.Put('%s_time' % key, str(time()))
self.db.Write(batch)
def _read_data(self, spider, request):
key = self._request_key(request)
try:
ts = self.db.Get('%s_time' % key)
except KeyError:
return # not found or invalid entry
if 0 < self.expiration_secs < time() - float(ts):
return # expired
try:
data = self.db.Get('%s_data' % key)
except KeyError:
return # invalid entry
else:
return pickle.loads(data)
def _request_key(self, request):
return request_fingerprint(request)
def parse_cachecontrol(header):
"""Parse Cache-Control header
http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9
>>> parse_cachecontrol('public, max-age=3600') == {'public': None,
... 'max-age': '3600'}
True
>>> parse_cachecontrol('') == {}
True
"""
directives = {}
for directive in header.split(','):
key, sep, val = directive.strip().partition('=')
if key:
directives[key.lower()] = val if sep else None
return directives
def rfc1123_to_epoch(date_str):
try:
return mktime_tz(parsedate_tz(date_str))
except Exception:
return None
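# Minimal usage sketch (assumption: run from an interactive session):
#   >>> rfc1123_to_epoch('Sun, 06 Nov 1994 08:49:37 GMT')
#   784111777
#   >>> rfc1123_to_epoch('not a date') is None   # parse failures return None
#   True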
| bsd-3-clause |
FedoraScientific/salome-hexablock | src/TEST_PY/test_unit/test_cuve.py | 1 | 2161 | # -*- coding: latin-1 -*-
# Copyright (C) 2009-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Francis KLOSS - 2011-2013 - CEA-Saclay, DEN, DM2S, SFME, LGLS, F-91191 Gif-sur-Yvette, France
# =============================================================================================
import hexablock
doc = hexablock.addDocument("cuve")
# Build the cylindrical grid
# --------------------------
centre = doc.addVertex(0, 0, 0)
vecteur_px = doc.addVector(1, 0, 0)
vecteur_pz = doc.addVector(0, 0, 1)
grille = doc.makeCylindrical(centre, vecteur_px, vecteur_pz, 1, 360, 1, 3, 8, 1, False)
# Add the center
# --------------
quad_0 = grille.getQuadJK(0, 0, 0)
quad_6 = grille.getQuadJK(0, 6, 0)
quad_7 = grille.getQuadJK(0, 7, 0)
hexa_a = doc.addHexa3Quads(quad_0, quad_6, quad_7)
quad_2 = grille.getQuadJK(0, 2, 0)
quad_3 = grille.getQuadJK(0, 3, 0)
quad_4 = grille.getQuadJK(0, 4, 0)
hexa_b = doc.addHexa3Quads(quad_2, quad_3, quad_4)
quad_1 = grille.getQuadJK(0, 1, 0)
quad_5 = grille.getQuadJK(0, 5, 0)
quad_a = hexa_a.getQuad(1)
quad_b = hexa_b.getQuad(1)
hexa_c = doc.addHexa4Quads(quad_1, quad_a, quad_5, quad_b)
law = doc.addLaw("Uniform", 3)
for j in range(doc.countPropagation()):
propa = doc.getPropagation(j)
propa.setLaw(law)
mesh_hexas = hexablock.mesh (doc)
| lgpl-2.1 |
ptdtan/Ragout | lib/networkx/algorithms/block.py | 47 | 4055 | # encoding: utf-8
"""
Functions for creating network blockmodels from node partitions.
Created by Drew Conway <drew.conway@nyu.edu>
Copyright (c) 2010. All rights reserved.
"""
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
'Aric Hagberg <hagberg@lanl.gov>'])
__all__=['blockmodel']
import networkx as nx
def blockmodel(G,partitions,multigraph=False):
"""Returns a reduced graph constructed using the generalized block modeling
technique.
The blockmodel technique collapses nodes into blocks based on a
given partitioning of the node set. Each partition of nodes
(block) is represented as a single node in the reduced graph.
Edges between nodes in the block graph are added according to the
edges in the original graph. If the parameter multigraph is False
(the default) a single edge is added with a weight equal to the
sum of the edge weights between nodes in the original graph
The default is a weight of 1 if weights are not specified. If the
parameter multigraph is True then multiple edges are added each
with the edge data from the original graph.
Parameters
----------
G : graph
A networkx Graph or DiGraph
partitions : list of lists, or list of sets
The partition of the nodes. Must be non-overlapping.
multigraph : bool, optional
If True return a MultiGraph with the edge data of the original
graph applied to each corresponding edge in the new graph.
If False return a Graph with the sum of the edge weights, or a
count of the edges if the original graph is unweighted.
Returns
-------
blockmodel : a Networkx graph object
Examples
--------
>>> G=nx.path_graph(6)
>>> partition=[[0,1],[2,3],[4,5]]
>>> M=nx.blockmodel(G,partition)
References
----------
.. [1] Patrick Doreian, Vladimir Batagelj, and Anuska Ferligoj
"Generalized Blockmodeling",Cambridge University Press, 2004.
"""
# Create sets of node partitions
part=list(map(set,partitions))
# Check for overlapping node partitions
u=set()
for p1,p2 in zip(part[:-1],part[1:]):
u.update(p1)
#if not u.isdisjoint(p2): # Python 2.6 required
if len(u.intersection(p2)) > 0:
raise nx.NetworkXException("Overlapping node partitions.")
# Initialize blockmodel graph
if multigraph:
if G.is_directed():
M=nx.MultiDiGraph()
else:
M=nx.MultiGraph()
else:
if G.is_directed():
M=nx.DiGraph()
else:
M=nx.Graph()
# Add nodes and properties to blockmodel
# The blockmodel nodes are node-induced subgraphs of G
# Label them with integers starting at 0
for i,p in zip(range(len(part)),part):
M.add_node(i)
# The node-induced subgraph is stored as the node 'graph' attribute
SG=G.subgraph(p)
M.node[i]['graph']=SG
M.node[i]['nnodes']=SG.number_of_nodes()
M.node[i]['nedges']=SG.number_of_edges()
M.node[i]['density']=nx.density(SG)
# Create mapping between original node labels and new blockmodel node labels
block_mapping={}
for n in M:
nodes_in_block=M.node[n]['graph'].nodes()
block_mapping.update(dict.fromkeys(nodes_in_block,n))
# Add edges to block graph
for u,v,d in G.edges(data=True):
bmu=block_mapping[u]
bmv=block_mapping[v]
if bmu==bmv: # no self loops
continue
if multigraph:
# For multigraphs add an edge for each edge in original graph
M.add_edge(bmu,bmv,attr_dict=d)
else:
# For graphs and digraphs add single weighted edge
weight=d.get('weight',1.0) # default to 1 if no weight specified
if M.has_edge(bmu,bmv):
M[bmu][bmv]['weight']+=weight
else:
M.add_edge(bmu,bmv,weight=weight)
return M
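# Hedged usage sketch (not part of the original module): edge weights in the
# block graph count the edges between blocks of the original graph.
#   >>> G = nx.path_graph(6)
#   >>> M = nx.blockmodel(G, [[0, 1], [2, 3], [4, 5]])
#   >>> sorted(M.edges(data=True))
#   [(0, 1, {'weight': 1.0}), (1, 2, {'weight': 1.0})]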
| gpl-3.0 |
damdam-s/account-analytic | __unported__/analytic_secondaxis/wizard/__init__.py | 11 | 1436 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Joel Grand-guillaume (Camptocamp)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from . import analytic_activity_chart
| agpl-3.0 |
ColdHeat/binaryninja-python | Util.py | 4 | 10622 | # Copyright (c) 2011-2015 Rusty Wagner
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import struct
from PySide.QtCore import *
from PySide.QtGui import *
from Crypto.Hash import MD2
from Crypto.Hash import MD4
from Crypto.Hash import MD5
from Crypto.Hash import SHA
from Crypto.Hash import SHA256
from Crypto.Hash import HMAC
import Transform
def hex_dump_encode(data):
result = ""
for i in range(0, len(data), 16):
result += "%.8x:" % i
hex = ""
ascii = ""
for j in range(0, 16):
if (i + j) >= len(data):
hex += " "
else:
hex += " %.2x" % ord(data[i + j])
if (data[i + j] < ' ') or (data[i + j] > '~'):
ascii += "."
else:
ascii += data[i + j]
result += hex + " " + ascii + "\n"
return result
def hex_dump_decode(data):
result = ""
lines = data.split("\n")
for line in lines:
# Hex dump lines follow the following format:
# * An address, followed by any number of spaces
# * The hex dump itself, 16 bytes per line
# * Optionally two or more spaces, followed by the ASCII dump
line.strip(" \t")
if line.find(' ') == -1:
continue
hex = line[line.find(' '):].strip(" \t")
if hex.find(" ") != -1:
hex = hex[0:hex.find(" ")]
hex = hex.replace(" ", "")
hex = hex[0:32]
result += hex.decode("hex")
return result
def encode_utf16_string(data, char_escape):
if len(data) % 2:
raise ValueError, "Odd number of bytes"
result = ""
for i in range(0, len(data), 2):
value = struct.unpack("<H", data[i:i+2])[0]
if (value >= ord(' ')) and (value <= ord('~')):  # compare ints, not strings (Python 2 always ordered int < str)
result += chr(value)
else:
result += char_escape + ("%.4x" % value)
return result
def encode_url(data):
result = ""
for i in range(0, len(data)):
if data[i] in ['-', '_', '.', '~']:
result += data[i]
elif (data[i] >= '0') and (data[i] <= '9'):
result += data[i]
elif (data[i] >= 'a') and (data[i] <= 'z'):
result += data[i]
elif (data[i] >= 'A') and (data[i] <= 'Z'):
result += data[i]
else:
result += "%%%.2x" % ord(data[i])
return result
def decode_url(data):
result = ""
i = 0
while i < len(data):
if data[i] == '%':
if data[i + 1] == 'u':
result += unichr(int(data[i+2:i+6], 16)).encode("utf8")
i += 6
else:
result += chr(int(data[i+1:i+3], 16))
i += 3
else:
result += data[i]
i += 1
return result
def encode_c_array(data, element_size, element_struct, type_name, postfix):
if len(data) % element_size:
raise ValueError, "Data length is not a multiple of the element size"
fmt = "0x%%.%dx%s" % (element_size * 2, postfix)
result = "{\n"
for i in range(0, len(data), 16):
line = ""
for j in range(0, 16, element_size):
if (i + j) >= len(data):
break
if j > 0:
line += ", "
value = struct.unpack(element_struct, data[i+j:i+j+element_size])[0]
line += fmt % value
if (i + 16) < len(data):
line += ","
result += "\t" + line + "\n"
return type_name + (" data[%d] = \n" % (len(data) / element_size)) + result + "};\n"
def decode_int_list(data, signed, unsigned):
result = ""
list = data.split(",")
for i in list:
i = i.strip(" \t\r\n")
value = int(i, 0)
if value < 0:
result += struct.pack(signed, value)
else:
result += struct.pack(unsigned, value)
return result
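# Illustrative round trips for the helpers above (a sketch under Python 2
# semantics, not part of the original file):
#   encode_url("a b")                    -> "a%20b"
#   decode_url("a%20b")                  -> "a b"
#   decode_int_list("1, -1", "b", "B")   -> "\x01\xff"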
class CancelException(Exception):
pass
def request_key(obj):
dlg = Transform.KeyDialog(obj)
if dlg.exec_() == QDialog.Rejected:
raise CancelException
return dlg.key[:]
def populate_copy_as_menu(menu, obj, action_table):
string_menu = menu.addMenu("Escaped string")
action_table[string_menu.addAction("ASCII")] = lambda : obj.copy_as(lambda data : data.encode("string_escape").replace("\"", "\\\""), False)
action_table[string_menu.addAction("UTF-8 URL")] = lambda : obj.copy_as(encode_url, False)
action_table[string_menu.addAction("UTF-8 IDNA")] = lambda : obj.copy_as(lambda data : data.decode("utf8").encode("idna"), False)
action_table[string_menu.addAction("UTF-16 (\\u)")] = lambda : obj.copy_as(lambda data : encode_utf16_string(data, "\\u"), False)
action_table[string_menu.addAction("UTF-16 (%u)")] = lambda : obj.copy_as(lambda data : encode_utf16_string(data, "%u"), False)
action_table[string_menu.addAction("UTF-16 URL")] = lambda : obj.copy_as(lambda data : encode_url(data.decode("utf16").encode("utf8")), False)
action_table[string_menu.addAction("UTF-16 IDNA")] = lambda : obj.copy_as(lambda data : data.decode("utf16").encode("idna"), False)
unicode_menu = menu.addMenu("Unicode")
action_table[unicode_menu.addAction("UTF-16")] = lambda : obj.copy_as(lambda data : data.decode("utf16"), False)
action_table[unicode_menu.addAction("UTF-32")] = lambda : obj.copy_as(lambda data : data.decode("utf32"), False)
menu.addSeparator()
action_table[menu.addAction("Hex dump")] = lambda : obj.copy_as(hex_dump_encode, False)
action_table[menu.addAction("Raw hex")] = lambda : obj.copy_as(lambda data : data.encode("hex"), False)
action_table[menu.addAction("Base64")] = lambda : obj.copy_as(lambda data : data.encode("base64"), False)
action_table[menu.addAction("UUEncode")] = lambda : obj.copy_as(lambda data : data.encode("uu_codec"), False)
compress_menu = menu.addMenu("Compressed")
action_table[compress_menu.addAction("zlib")] = lambda : obj.copy_as(lambda data : data.encode("zlib"), True)
action_table[compress_menu.addAction("bz2")] = lambda : obj.copy_as(lambda data : data.encode("bz2"), True)
menu.addSeparator()
array_menu = menu.addMenu("C array")
action_table[array_menu.addAction("8-bit elements")] = lambda : obj.copy_as(lambda data : encode_c_array(data, 1, "B", "unsigned char", ""), False)
action_table[array_menu.addAction("16-bit elements")] = lambda : obj.copy_as(lambda data : encode_c_array(data, 2, "<H", "unsigned short", ""), False)
action_table[array_menu.addAction("32-bit elements")] = lambda : obj.copy_as(lambda data : encode_c_array(data, 4, "<I", "unsigned int", ""), False)
action_table[array_menu.addAction("64-bit elements")] = lambda : obj.copy_as(lambda data : encode_c_array(data, 8, "<Q", "unsigned long long", "LL"), False)
menu.addSeparator()
hash_menu = menu.addMenu("Hash")
action_table[hash_menu.addAction("MD2")] = lambda : obj.copy_as(lambda data : MD2.new(data).digest(), True)
action_table[hash_menu.addAction("MD4")] = lambda : obj.copy_as(lambda data : MD4.new(data).digest(), True)
action_table[hash_menu.addAction("MD5")] = lambda : obj.copy_as(lambda data : MD5.new(data).digest(), True)
action_table[hash_menu.addAction("SHA-1")] = lambda : obj.copy_as(lambda data : SHA.new(data).digest(), True)
action_table[hash_menu.addAction("SHA-256")] = lambda : obj.copy_as(lambda data : SHA256.new(data).digest(), True)
hmac_menu = hash_menu.addMenu("HMAC")
action_table[hmac_menu.addAction("MD2")] = lambda : obj.copy_as(lambda data : HMAC.new(request_key(obj), data, MD2).digest(), True)
action_table[hmac_menu.addAction("MD4")] = lambda : obj.copy_as(lambda data : HMAC.new(request_key(obj), data, MD4).digest(), True)
action_table[hmac_menu.addAction("MD5")] = lambda : obj.copy_as(lambda data : HMAC.new(request_key(obj), data, MD5).digest(), True)
action_table[hmac_menu.addAction("SHA-1")] = lambda : obj.copy_as(lambda data : HMAC.new(request_key(obj), data, SHA).digest(), True)
action_table[hmac_menu.addAction("SHA-256")] = lambda : obj.copy_as(lambda data : HMAC.new(request_key(obj), data, SHA256).digest(), True)
def populate_paste_from_menu(menu, obj, action_table):
string_menu = menu.addMenu("Escaped string")
action_table[string_menu.addAction("ASCII")] = lambda : obj.paste_from(lambda data : data.decode("string_escape"))
action_table[string_menu.addAction("UTF-8 URL")] = lambda : obj.paste_from(decode_url)
action_table[string_menu.addAction("UTF-8 IDNA")] = lambda : obj.paste_from(lambda data : data.decode("idna").encode("utf8"))
action_table[string_menu.addAction("UTF-16 (\\u)")] = lambda : obj.paste_from(lambda data : data.decode("unicode_escape").encode("utf-16le"))
action_table[string_menu.addAction("UTF-16 (%u)")] = lambda : obj.paste_from(lambda data : decode_url(data).decode("utf8").encode("utf-16le"))
action_table[string_menu.addAction("UTF-16 URL")] = lambda : obj.paste_from(lambda data : decode_url(data).decode("utf8").encode("utf-16le"))
action_table[string_menu.addAction("UTF-16 IDNA")] = lambda : obj.paste_from(lambda data : data.decode("idna").encode("utf-16le"))
unicode_menu = menu.addMenu("Unicode")
action_table[unicode_menu.addAction("UTF-16")] = lambda : obj.paste_from(lambda data : data.decode("utf8").encode("utf-16le"))
action_table[unicode_menu.addAction("UTF-32")] = lambda : obj.paste_from(lambda data : data.decode("utf8").encode("utf-32le"))
menu.addSeparator()
action_table[menu.addAction("Hex dump")] = lambda : obj.paste_from(hex_dump_decode)
action_table[menu.addAction("Raw hex")] = lambda : obj.paste_from(lambda data : data.translate(None, " ,\t\r\n").decode("hex"))
action_table[menu.addAction("Base64")] = lambda : obj.paste_from(lambda data : data.decode("base64"))
action_table[menu.addAction("UUEncode")] = lambda : obj.paste_from(lambda data : data.decode("uu_codec"))
action_table[menu.addAction("Python expression")] = lambda : obj.paste_from(lambda data : eval(data))
compress_menu = menu.addMenu("Compressed")
action_table[compress_menu.addAction("zlib")] = lambda : obj.paste_from(lambda data : data.decode("zlib"))
action_table[compress_menu.addAction("bz2")] = lambda : obj.paste_from(lambda data : data.decode("bz2"))
menu.addSeparator()
list_menu = menu.addMenu("Integer list")
action_table[list_menu.addAction("8-bit elements")] = lambda : obj.paste_from(lambda data : decode_int_list(data, "b", "B"))
action_table[list_menu.addAction("16-bit elements")] = lambda : obj.paste_from(lambda data : decode_int_list(data, "<h", "<H"))
action_table[list_menu.addAction("32-bit elements")] = lambda : obj.paste_from(lambda data : decode_int_list(data, "<i", "<I"))
action_table[list_menu.addAction("64-bit elements")] = lambda : obj.paste_from(lambda data : decode_int_list(data, "<q", "<Q"))
| gpl-2.0 |
jmesteve/openerp | openerp/addons/stock_invoice_directly/wizard/stock_invoice.py | 39 | 2344 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class invoice_directly(osv.osv_memory):
_inherit = 'stock.partial.picking'
def do_partial(self, cr, uid, ids, context=None):
"""Launch Create invoice wizard if invoice state is To be Invoiced,
after processing the partial picking.
"""
if context is None: context = {}
result = super(invoice_directly, self).do_partial(cr, uid, ids, context)
partial = self.browse(cr, uid, ids[0], context)
if partial.picking_id.state != 'done' and partial.picking_id.backorder_id:
# delivery is not finished, opening invoice on backorder
picking = partial.picking_id.backorder_id
else:
picking = partial.picking_id
context.update(active_model='stock.picking',
active_ids=[picking.id])
if picking.invoice_state == '2binvoiced':
return {
'name': 'Create Invoice',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'stock.invoice.onshipping',
'type': 'ir.actions.act_window',
'target': 'new',
'context': context
}
return {'type': 'ir.actions.act_window_close'}
invoice_directly()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ygol/odoo | addons/membership/__openerp__.py | 197 | 2207 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Membership Management',
'version': '0.1',
'category': 'Association',
'description': """
This module allows you to manage all operations related to memberships.
=========================================================================
It supports different kind of members:
--------------------------------------
* Free member
* Associated member (e.g.: a group subscribes to a membership for all subsidiaries)
* Paid members
* Special member prices
It is integrated with sales and accounting to allow you to automatically
invoice and send propositions for membership renewal.
""",
'author': 'OpenERP SA',
'depends': ['base', 'product', 'account'],
'data': [
'security/ir.model.access.csv',
'wizard/membership_invoice_view.xml',
'membership_data.xml',
'membership_view.xml',
'report/report_membership_view.xml',
],
'demo': [
'membership_demo.xml',
'membership_demo.yml'
],
'website': 'https://www.odoo.com/page/community-builder',
'test': ['test/test_membership.yml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
DPaaS-Raksha/horizon | horizon/exceptions.py | 7 | 12145 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exceptions raised by the Horizon code and the machinery for handling them.
"""
import logging
import os
import sys
from django.contrib.auth import logout
from django.http import HttpRequest
from django.utils import termcolors
from django.utils.translation import ugettext as _
from django.views.debug import SafeExceptionReporterFilter, CLEANSED_SUBSTITUTE
from horizon import messages
from horizon.conf import HORIZON_CONFIG
LOG = logging.getLogger(__name__)
PALETTE = termcolors.PALETTES[termcolors.DEFAULT_PALETTE]
class HorizonReporterFilter(SafeExceptionReporterFilter):
""" Error report filter that's always active, even in DEBUG mode. """
def is_active(self, request):
return True
# TODO(gabriel): This bugfix is cribbed from Django's code. When 1.4.1
# is available we can remove this code.
def get_traceback_frame_variables(self, request, tb_frame):
"""
Replaces the values of variables marked as sensitive with
stars (*********).
"""
# Loop through the frame's callers to see if the sensitive_variables
# decorator was used.
current_frame = tb_frame.f_back
sensitive_variables = None
while current_frame is not None:
if (current_frame.f_code.co_name == 'sensitive_variables_wrapper'
and 'sensitive_variables_wrapper'
in current_frame.f_locals):
# The sensitive_variables decorator was used, so we take note
# of the sensitive variables' names.
wrapper = current_frame.f_locals['sensitive_variables_wrapper']
sensitive_variables = getattr(wrapper,
'sensitive_variables',
None)
break
current_frame = current_frame.f_back
cleansed = []
if self.is_active(request) and sensitive_variables:
if sensitive_variables == '__ALL__':
# Cleanse all variables
for name, value in tb_frame.f_locals.items():
cleansed.append((name, CLEANSED_SUBSTITUTE))
return cleansed
else:
# Cleanse specified variables
for name, value in tb_frame.f_locals.items():
if name in sensitive_variables:
value = CLEANSED_SUBSTITUTE
elif isinstance(value, HttpRequest):
# Cleanse the request's POST parameters.
value = self.get_request_repr(value)
cleansed.append((name, value))
return cleansed
else:
# Potentially cleanse only the request if it's one of the
# frame variables.
for name, value in tb_frame.f_locals.items():
if isinstance(value, HttpRequest):
# Cleanse the request's POST parameters.
value = self.get_request_repr(value)
cleansed.append((name, value))
return cleansed
class HorizonException(Exception):
""" Base exception class for distinguishing our own exception classes. """
pass
class Http302(HorizonException):
"""
Error class which can be raised from within a handler to cause an
early bailout and redirect at the middleware level.
"""
status_code = 302
def __init__(self, location, message=None):
self.location = location
self.message = message
class NotAuthorized(HorizonException):
"""
Raised whenever a user attempts to access a resource which they do not
have permission-based access to (such as when failing the
:func:`~horizon.decorators.require_perms` decorator).
The included :class:`~horizon.middleware.HorizonMiddleware` catches
``NotAuthorized`` and handles it gracefully by displaying an error
message and redirecting the user to a login page.
"""
status_code = 401
class NotAuthenticated(HorizonException):
"""
Raised when a user is trying to make requests and they are not logged in.
The included :class:`~horizon.middleware.HorizonMiddleware` catches
``NotAuthenticated`` and handles it gracefully by displaying an error
message and redirecting the user to a login page.
"""
status_code = 403
class NotFound(HorizonException):
""" Generic error to replace all "Not Found"-type API errors. """
status_code = 404
class RecoverableError(HorizonException):
""" Generic error to replace any "Recoverable"-type API errors. """
status_code = 100 # HTTP status code "Continue"
class ServiceCatalogException(HorizonException):
"""
Raised when a requested service is not available in the ``ServiceCatalog``
returned by Keystone.
"""
def __init__(self, service_name):
message = 'Invalid service catalog service: %s' % service_name
super(ServiceCatalogException, self).__init__(message)
class AlreadyExists(HorizonException):
"""
Exception to be raised when trying to create an API resource which
already exists.
"""
def __init__(self, name, resource_type):
self.attrs = {"name": name, "resource": resource_type}
self.msg = 'A %(resource)s with the name "%(name)s" already exists.'
def __repr__(self):
return self.msg % self.attrs
def __str__(self):
return self.msg % self.attrs
def __unicode__(self):
return _(self.msg) % self.attrs
class WorkflowError(HorizonException):
""" Exception to be raised when something goes wrong in a workflow. """
pass
class WorkflowValidationError(HorizonException):
"""
Exception raised during workflow validation if required data is missing,
or existing data is not valid.
"""
pass
class HandledException(HorizonException):
"""
Used internally to track exceptions that have gone through
:func:`horizon.exceptions.handle` more than once.
"""
def __init__(self, wrapped):
self.wrapped = wrapped
UNAUTHORIZED = tuple(HORIZON_CONFIG['exceptions']['unauthorized'])
NOT_FOUND = tuple(HORIZON_CONFIG['exceptions']['not_found'])
RECOVERABLE = (AlreadyExists,)
RECOVERABLE += tuple(HORIZON_CONFIG['exceptions']['recoverable'])
def error_color(msg):
return termcolors.colorize(msg, **PALETTE['ERROR'])
def check_message(keywords, message):
"""
Checks an exception for given keywords and raises a new ``ActionError``
with the desired message if the keywords are found. This allows selective
control over API error messages.
"""
exc_type, exc_value, exc_traceback = sys.exc_info()
if set(str(exc_value).split(" ")).issuperset(set(keywords)):
exc_value._safe_message = message
raise
def handle(request, message=None, redirect=None, ignore=False,
escalate=False, log_level=None, force_log=None):
""" Centralized error handling for Horizon.
Because Horizon consumes so many different APIs with completely
different ``Exception`` types, it's necessary to have a centralized
place for handling exceptions which may be raised.
Exceptions are roughly divided into 3 types:
#. ``UNAUTHORIZED``: Errors resulting from authentication or authorization
problems. These result in being logged out and sent to the login screen.
#. ``NOT_FOUND``: Errors resulting from objects which could not be
located via the API. These generally result in a user-facing error
message, but are otherwise returned to the normal code flow. Optionally
a redirect value may be passed to the error handler so users are
returned to a different view than the one requested in addition to the
error message.
#. ``RECOVERABLE``: Generic API errors which generate a user-facing message
but drop directly back to the regular code flow.
All other exceptions bubble the stack as normal unless the ``ignore``
argument is passed in as ``True``, in which case only unrecognized
errors are bubbled.
If the exception is not re-raised, an appropriate wrapper exception
class indicating the type of exception that was encountered will be
returned.
"""
exc_type, exc_value, exc_traceback = sys.exc_info()
log_method = getattr(LOG, log_level or "exception")
force_log = force_log or os.environ.get("HORIZON_TEST_RUN", False)
force_silence = getattr(exc_value, "silence_logging", False)
# Because the same exception may travel through this method more than
# once (if it's re-raised) we may want to treat it differently
# the second time (e.g. no user messages/logging).
handled = issubclass(exc_type, HandledException)
wrap = False
# Restore our original exception information, but re-wrap it at the end
if handled:
exc_type, exc_value, exc_traceback = exc_value.wrapped
wrap = True
# We trust messages from our own exceptions
if issubclass(exc_type, HorizonException):
message = exc_value
# Check for an override message
elif getattr(exc_value, "_safe_message", None):
message = exc_value._safe_message
# If the message has a placeholder for the exception, fill it in
elif message and "%(exc)s" in message:
message = message % {"exc": exc_value}
if issubclass(exc_type, UNAUTHORIZED):
if ignore:
return NotAuthorized
if not force_silence and not handled:
log_method(error_color("Unauthorized: %s" % exc_value))
if not handled:
if message:
message = _("Unauthorized: %s") % message
# We get some pretty useless error messages back from
# some clients, so let's define our own fallback.
fallback = _("Unauthorized. Please try logging in again.")
messages.error(request, message or fallback)
# Escalation means logging the user out and raising NotAuthorized
# so the middleware will redirect them appropriately.
if escalate:
logout(request)
raise NotAuthorized
# Otherwise continue and present our "unauthorized" error message.
return NotAuthorized
if issubclass(exc_type, NOT_FOUND):
wrap = True
if not force_silence and not handled and (not ignore or force_log):
log_method(error_color("Not Found: %s" % exc_value))
if not ignore and not handled:
messages.error(request, message or exc_value)
if redirect:
raise Http302(redirect)
if not escalate:
return NotFound # return to normal code flow
if issubclass(exc_type, RECOVERABLE):
wrap = True
if not force_silence and not handled and (not ignore or force_log):
# Default recoverable error to WARN log level
log_method = getattr(LOG, log_level or "warning")
log_method(error_color("Recoverable error: %s" % exc_value))
if not ignore and not handled:
messages.error(request, message or exc_value)
if redirect:
raise Http302(redirect)
if not escalate:
return RecoverableError # return to normal code flow
# If we've gotten here, time to wrap and/or raise our exception.
if wrap:
raise HandledException([exc_type, exc_value, exc_traceback])
raise exc_type, exc_value, exc_traceback
| apache-2.0 |
gooddata/openstack-nova | nova/db/sqlalchemy/api_migrations/migrate_repo/versions/018_instance_groups.py | 14 | 2633 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""API Database migrations for instance_groups"""
from migrate import UniqueConstraint
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
groups = Table('instance_groups', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('uuid', String(length=36), nullable=False),
Column('name', String(length=255)),
UniqueConstraint('uuid',
name='uniq_instance_groups0uuid'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
groups.create(checkfirst=True)
group_policy = Table('instance_group_policy', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('policy', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
Index('instance_group_policy_policy_idx', 'policy'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_policy.create(checkfirst=True)
group_member = Table('instance_group_member', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_uuid', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
Index('instance_group_member_instance_idx', 'instance_uuid'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_member.create(checkfirst=True)
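# Resulting schema sketch (an illustration inferred from the DDL above):
#   instance_groups(id PK, uuid UNIQUE, user_id, project_id, name, ...)
#   instance_group_policy(id PK, policy, group_id FK -> instance_groups.id)
#   instance_group_member(id PK, instance_uuid, group_id FK -> instance_groups.id)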
| apache-2.0 |
Groupe24/CodeInSpace | tournoi/37/game_1.py | 1 | 62455 | # -*- coding: utf-8 -*-
import random
import shutil
import time
import socket
def game_preparation(file):
""" Prepare the data structures need to play the game based on a .mv file.
Return
------
- field: list of lists containing the characters contained in each cases of the board (list).
- redfleet: dictionnary of dictionnaries containing the data of the ships of the red player (dict).
- bluefleet: dictionnary of dictionnaries containing the data of the ships of the red player (dict).
- asteroids: list of dictionnaries containing the data of the ships of the red player (list).
Version
-------
- specification: Simon Defrenne (v.2 03/03/18)
- implementation: Simon Defrenne (v.2 03/03/18)
"""
# preparation work
fh = open(file,"r")
prep = fh.readlines()
# generate store
store = {"scout":{"health":3,"attack":1,"range":3,"cost":3},
"warship":{"health":18,"attack":3,"range":5,"cost":9},
"excavator-S":{"health":2,"tonnage":1,"cost":1},
"excavator-M":{"health":3,"tonnage":4,"cost":2},
"excavator-L":{"health":6,"tonnage":8,"cost":4}}
# generate fleets and portals
redfleet = {}
bluefleet = {}
redfleet["portal"] = {"type":"portal","health":100,"hitbox":[int(str.split(prep[3]," ")[0]),int(str.split(prep[3]," ")[1])],"ore":4,"locked":[],"identifiant":"R","score":0}
bluefleet["portal"] = {"type":"portal","health":100,"hitbox":[int(str.split(prep[4]," ")[0]),int(str.split(prep[4]," ")[1])],"ore":4,"locked":[],"identifiant":"B","score":0}
# generate asteroids
asteroids = []
asteroidsprep1 = prep[6:len(prep)]
for asteroid in asteroidsprep1:
asteroidsprep2 = str.split(asteroid," ")
asteroidsprep2[-1] = asteroidsprep2[-1].replace("\n","")
asteroidsprep3 = {"hitbox":[int(asteroidsprep2[0]),int(asteroidsprep2[1])],"ore":int(asteroidsprep2[2]),"harvest":int(asteroidsprep2[3]),"locked":[]}
asteroids.append(asteroidsprep3)
# stock the size of the map
mapsize = [int(str.split(prep[1]," ")[0])-1,int(str.split(prep[1]," ")[1])-1]
# cleaning work
fh.close
return redfleet,bluefleet,asteroids,store,mapsize
def manhattan_distance(case_1,case_2):
""" Calculate the distance between two case on the field.
Parameters
----------
- case_1 : a particular case on the field (list)
- case_2 : a particular case on the field (list)
Return
------
- distance: the distance between the two case (int)
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne (v.1 18/03/18)
"""
x_p,y_p = coordinates(case_1)
x_e,y_e = coordinates(case_2)
x_p = int(x_p)
y_p = int(y_p)
x_e = int(x_e)
y_e = int(y_e)
x1,x2 = max(x_p,x_e),min(x_p,x_e)
y1,y2 = max(y_p,y_e),min(y_p,y_e)
distance = (x1 - x2) + (y1 - y2)
return distance
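# Minimal usage sketch (values are assumptions):
#   manhattan_distance([1, 1], [4, 5]) -> 7   # |4 - 1| + |5 - 1|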
def hitbox(ship):
"""Calculate the hitbox of a ship based on its type the localisation of its center.
Parameters
----------
- ship: ship whose hitbox is asked for (str).
Returns
-------
- hitbox : list of coordinates that represent the ship (list).
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne (v.1 18/03/18)
"""
ship_type = ship["type"]
x,y = coordinates(ship["hitbox"])
full_hitbox = []
full_hitbox.append([x,y])
if ship_type == "excavator-M" or ship_type == "excavator-L" or ship_type == "scout" or ship_type == "warship" or ship_type == "portal":
full_hitbox.append([x+1,y])
full_hitbox.append([x-1,y])
full_hitbox.append([x,y+1])
full_hitbox.append([x,y-1])
if ship_type == "excavator-L" or ship_type == "warship" or ship_type == "portal":
full_hitbox.append([x+2,y])
full_hitbox.append([x-2,y])
full_hitbox.append([x,y+2])
full_hitbox.append([x,y-2])
if ship_type == "scout" or ship_type == "warship" or ship_type == "portal":
full_hitbox.append([x+1,y+1])
full_hitbox.append([x-1,y-1])
full_hitbox.append([x+1,y-1])
full_hitbox.append([x-1,y+1])
if ship_type == "warship" or ship_type == "portal":
full_hitbox.append([x+1,y+2])
full_hitbox.append([x-1,y-2])
full_hitbox.append([x+1,y-2])
full_hitbox.append([x-1,y+2])
full_hitbox.append([x+2,y+1])
full_hitbox.append([x-2,y-1])
full_hitbox.append([x+2,y-1])
full_hitbox.append([x-2,y+1])
if ship_type == "portal":
full_hitbox.append([x+2,y+2])
full_hitbox.append([x-2,y-2])
full_hitbox.append([x+2,y-2])
full_hitbox.append([x-2,y+2])
return full_hitbox
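# Illustrative example (an assumption, not original code): a scout centred on
# [5, 5] occupies a 3x3 square, i.e. hitbox({"type": "scout", "hitbox": [5, 5]})
# returns the 9 cases from [4, 4] to [6, 6].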
def coordinates(list_coordinates):
""" Split a list of two numbers into one abscissa and one ordinate
Parameters
----------
- list_coordinates : list of two numbers, one abscissa and one ordinate (list)
Return
------
- x : abscissa (int)
- y : ordinate (int)
Version
-------
- specification: Simon Defrenne (v.2 04/05/18)
- implementation: Simon Defrenne (v.1 03/03/18)
"""
x = list_coordinates[0]
y = list_coordinates[1]
return x,y
def attack(ship,target,fleet,enemy_fleet,store):
""" The attack of a ship against a target.
Parameters
----------
- ship : the name of the ship that attacks (str)
- target : the targeted case (list)
- fleet : the fleet of the attacking ship (dict)
- enemy_fleet : the fleet of its enemy (dict)
- store: the data structure used to store information on ships based on their type (dict)
Return
------
- fleet : the fleet of the attacking ship (dict)
- enemy_fleet : the fleet of its enemy (dict)
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne (v.1 18/03/18)
"""
if ship in fleet:
if fleet[ship]["type"] == "scout" or fleet[ship]["type"] == "warship":
if manhattan_distance(fleet[ship]["hitbox"],target) <= store[fleet[ship]["type"]]["range"] and fleet[ship]["action"] == False:
target[0]= int(target[0])
target[1]= int(target[1])
for ships in fleet:
if target in hitbox(fleet[ships]):
fleet[ships]["health"] -= store[fleet[ship]["type"]]["attack"]
for ships in enemy_fleet:
if target in hitbox(enemy_fleet[ships]):
enemy_fleet[ships]["health"] -= store[fleet[ship]["type"]]["attack"]
fleet[ship]["action"] = True
return fleet,enemy_fleet
def buy(ship_name,ship_type,fleet,store):
""" Add a specific ship to the chosen fleet.
Parameters
----------
- ship_name : the chosen name of the ship (str)
- ship_type : the chosen type of the ship (str)
- fleet : the fleet to which the ship is added (dict)
- store: the data structure used to store information on ships based on their type (dict)
Return
------
- fleet : the fleet to which the ship is added (dict)
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne (v.1 18/03/18)
"""
if store[ship_type]["cost"] <= fleet["portal"]["ore"]:
fleet[ship_name] = {}
fleet[ship_name]["type"] = ship_type
fleet[ship_name]["health"] = store[ship_type]["health"]
fleet[ship_name]["hitbox"] = fleet["portal"]["hitbox"]
fleet[ship_name]["action"] = False
if "tonnage" in store[ship_type]:
fleet[ship_name]["tonnage"] = 0
fleet[ship_name]["lock"] = False
fleet["portal"]["ore"] -= store[ship_type]["cost"]
return fleet
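# Minimal usage sketch (the ship name is an assumption): buying a scout
# costs 3 ore per the store above.
#   fleet = buy("RS1", "scout", fleet, store)
#   # fleet["RS1"] spawns on the portal with 3 health; the portal's ore drops by 3.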
def name_ships(ship_type,fleet):
""" Allows the IA to create names for a ship it will buy.
Parameters
----------
- ship_type : the chosen type of ship (str)
- fleet : the fleet in which a ship is added (dict)
Returns
-------
- ship_name: the name of the ship (str)
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne, Marien (v.1 18/03/18)
"""
if ship_type == "scout":
ship_name_1 = "S"
elif ship_type == "warship":
ship_name_1 = "W"
elif ship_type == "excavator-S":
ship_name_1 = "E"
elif ship_type == "excavator-M":
ship_name_1 = "M"
elif ship_type == "excavator-L":
ship_name_1 = "L"
ship_name_1 = fleet["portal"]["identifiant"] + ship_name_1
right_name = False
ship_name_2 = 1
while not right_name:
ship_name = ship_name_1 + str(ship_name_2)
if ship_name in fleet:
ship_name_2 += 1
else:
right_name = True
return ship_name
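# e.g. (illustration, names are assumptions): name_ships("scout", redfleet)
# -> "RS1" when the red fleet does not yet contain a ship named "RS1".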
def move(ship,fleet,target,mapsize):
""" Move a ship into the target destination.
Parameters
----------
- ship : the name of the ship that moves (str)
- target : the targeted case (list)
- fleet : the fleet of the player (dict)
- mapsize : list containing the number of rows and columns of the map (list)
Return
------
- fleet : the fleet of the player (dict)
Version
-------
- specification: Simon Defrenne (v.1 23/03/18)
- implementation: Simon Defrenne (v.1 23/03/18)
"""
if ship in fleet:
movement = False
if manhattan_distance(fleet[ship]["hitbox"],target) <= 2:
if (not "tonnage" in fleet[ship]) or ("tonnage" in fleet[ship] and not fleet[ship]["lock"]):
s_type = fleet[ship]["type"]
if s_type == "warship" or s_type == "excavator-L":
if target[0] - 2 > 0:
movement = True
elif target[0] + 2 <= mapsize[0]:
movement = True
elif target[1] - 2 > 0:
movement = True
elif target[1] + 2 <= mapsize[1]:
movement = True
elif s_type == "scout" or s_type == "excavator-M":
if target[0] - 1 > 0:
movement = True
elif target[0] + 1 <= mapsize[0]:
movement = True
elif target[1] - 1 > 0:
movement = True
elif target[1] + 1 <= mapsize[1]:
movement = True
elif s_type == "excavator-S":
if target[0] > 0:
movement = True
elif target[0] <= mapsize[0]:
movement = True
elif target[1] > 0:
movement = True
elif target[1] <= mapsize[1]:
movement = True
if movement:
x_s,y_s = coordinates(fleet[ship]["hitbox"])
x_t,y_t = coordinates(target)
if (x_s == x_t or y_s == y_t) and manhattan_distance(fleet[ship]["hitbox"],target)==1:
fleet[ship]["hitbox"]=target
elif (x_s != x_t and y_s != y_t) and manhattan_distance(fleet[ship]["hitbox"],target)==2:
fleet[ship]["hitbox"]=target
fleet[ship]["action"] = True
return fleet
def locking(ship,order,fleet,asteroids):
""" Lock or unlock a excavator on a asteroid or its portal.
Parameters
----------
- ship : the name of the ship that locks/unlocks itself (dict)
- order : "lock" to lock a ship, "release" to unlock it (str)
- fleet : the fleet of the ship (dict)
- asteroids : list of asteroids (list)
Return
------
- fleet : the fleet of the ship (dict)
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne (v.1 18/03/18)
"""
if ship in fleet:
if fleet[ship]["type"] == "excavator-S" or fleet[ship]["type"] == "excavator-M" or fleet[ship]["type"] == "excavator-L":
if order == "release":
fleet[ship]["lock"] = False
if fleet[ship]["hitbox"] == fleet["portal"]["hitbox"]:
fleet["portal"]["locked"].remove(ship)
else:
for asteroid in asteroids:
if fleet[ship]["hitbox"] == asteroid["hitbox"]:
asteroid["locked"].remove(ship)
elif order == "lock":
if fleet[ship]["hitbox"] == fleet["portal"]["hitbox"]:
fleet["portal"]["locked"].append(ship)
fleet[ship]["lock"] = True
else:
for asteroid in asteroids:
if fleet[ship]["hitbox"] == asteroid["hitbox"]:
fleet[ship]["lock"] = True
asteroid["locked"].append(ship)
return fleet,asteroids
def turn(order_r,order_b, redfleet, bluefleet, store, asteroids,mapsize):
""" Run a turn of the game based on the orders of the players.
Parameters
----------
- order_r : orders of the red player (str)
- order_b : orders of the blue player (str)
- redfleet : the fleet of the red player (dict)
- bluefleet : the fleet of the blue player (dict)
- asteroids : list of asteroids (list)
- store: the data structure used to store information on ships based on their type (dict)
- mapsize : list containing the number of rows and columns of the map (list)
Return
------
- redfleet : the fleet of the red player (dict)
- bluefleet : the fleet of the blue player (dict)
- asteroids : list of asteroids (list)
Version
-------
- specification: Simon Defrenne (v.1 23/03/18)
- implementation: Simon Defrenne (v.1 23/03/18)
"""
# resolve every orders and the harvest of ore
r_attack_orders, r_move_orders, r_buy_orders, r_lock_orders = orders_prep(order_r)
b_attack_orders, b_move_orders, b_buy_orders, b_lock_orders = orders_prep(order_b)
redfleet = buy_resolution(r_buy_orders, redfleet,store)
bluefleet = buy_resolution(b_buy_orders, bluefleet,store)
redfleet,asteroids = locking_resolution(r_lock_orders, redfleet, asteroids)
bluefleet,asteroids = locking_resolution(b_lock_orders, bluefleet, asteroids)
redfleet = move_resolution(r_move_orders, redfleet,mapsize)
bluefleet = move_resolution(b_move_orders, bluefleet,mapsize)
redfleet,bluefleet = attack_resolution(r_attack_orders, redfleet, bluefleet, store, asteroids)
bluefleet,redfleet = attack_resolution(b_attack_orders, bluefleet, redfleet, store, asteroids)
if "portal" in redfleet and "portal" in bluefleet:
redfleet,bluefleet,asteroids = harvest(redfleet,bluefleet,asteroids,store)
# prepare the next turn
for ship in redfleet:
if redfleet[ship]["type"] != "portal":
redfleet[ship]["action"] = False
for ship in bluefleet:
if bluefleet[ship]["type"] != "portal":
bluefleet[ship]["action"] = False
return redfleet, bluefleet, asteroids
def orders_prep(list_orders):
""" Split the string of orders into four lists, based on the type of order.
Parameters
----------
list_orders: the sequence of orders the player has given (str)
Return
------
attack_orders: the attack orders (list)
move_orders: the move orders (list)
buy_orders: the buying orders (list)
lock_orders: the locking orders (list)
Version
-------
specification: Marien Dessy (v.1 10/04/18)
implementation: Simon Defrenne (v.1 04/04/18)
"""
list_orders = str.split(list_orders," ")
attack_orders = []
move_orders = []
buy_orders = []
lock_orders = []
for order in list_orders:
if "*" in order:
attack_orders.append(order)
elif "@" in order:
move_orders.append(order)
elif "lock" in order or "release" in order :
lock_orders.append(order)
elif "scout" in order or "warship" in order or "excavator-S" in order or "excavator-M" in order or "excavator-L" in order:
buy_orders.append(order)
return attack_orders, move_orders, buy_orders, lock_orders
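# Illustrative sketch (the order strings are assumptions matching the parser
# above): orders are split on spaces and routed by their marker.
#   orders_prep("RS1:*10-12 RM1:@4-5 RW1:warship RE1:lock")
#   -> (['RS1:*10-12'], ['RM1:@4-5'], ['RW1:warship'], ['RE1:lock'])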
def buy_resolution(buy_orders,fleet,store):
""" Resolve the buying orders of a player.
Parameters
----------
- buy_orders: the buying orders (list)
- fleet: the fleet of the player who gives the order (dict)
- store: the data structure used to store information on ships based on their type (dict)
Return
------
- fleet: the fleet of the player who gives the order (dict)
Version
-------
- specification: Marien Dessy (v.1 10/04/18)
- implementation: Simon Defrenne (v.1 04/04/18)
"""
for order in buy_orders:
fleet = buy(str.split(order,":")[0],str.split(order,":")[1],fleet,store)
return fleet
def locking_resolution(lock_orders,fleet,asteroids):
""" Resolve the locking orders of a player.
Parameters
----------
- lock_orders: the locking orders (list)
- fleet: the fleet of the player who gives the order (dict)
- asteroids: the list of asteroids (list)
Return
------
- fleet: the fleet of the player who gives the order (dict)
- asteroids: the list of asteroids (list)
Version
-------
- specification: Marien Dessy (v.1 10/04/18)
- implementation: Simon Defrenne (v.1 04/04/18)
"""
for order in lock_orders:
fleet,asteroids = locking(str.split(order,":")[0],str.split(order,":")[1],fleet,asteroids)
return fleet,asteroids
def move_resolution(move_orders,fleet,mapsize):
""" Resolve the move orders of a player.
Parameters
----------
- move_orders: the move orders (list)
- fleet: the fleet of the player who gives the order (dict)
- mapsize : list containing the number of rows and columns of the map (list)
Return
------
- fleet: the fleet of the player who gives the order (dict)
Version
-------
- specification: Marien Dessy (v.1 10/04/18)
- implementation: Simon Defrenne (v.1 04/04/18)
"""
for order in move_orders:
ship = str.split(order,":@")[0]
coordinates = str.split(order,":@")[1]
coordinates = [int(str.split(coordinates, "-")[0]),int(str.split(coordinates, "-")[1])]
fleet = move(ship,fleet,coordinates,mapsize)
return fleet
def attack_resolution(attack_orders,fleet,enemy_fleet,store,asteroids):
""" Resolve the attack orders of a player.
Parameters
----------
- attack_orders: the attack orders (list)
- fleet: the fleet of the player who gives the order (dict)
- enemy_fleet: the fleet of that player's enemy (dict)
- asteroids: the list of asteroids (list)
Return
------
- fleet: the fleet of the player who gives the order (dict)
- enemy_fleet: the fleet of that player's enemy (dict)
Version
-------
- specification: Marien Dessy (v.1 10/04/18)
- implementation: Simon Defrenne (v.1 04/04/18)
"""
for order in attack_orders:
ship = str.split(order,":*")[0]
coordinates_attack = str.split(order,":*")[1]
coordinates_attack_2 = str.split(coordinates_attack, "-")
coordinates = [coordinates_attack_2[0],coordinates_attack_2[1]]
fleet,enemy_fleet = attack(ship,coordinates,fleet,enemy_fleet,store)
# delete the destroyed ships
fleet_dead_ships = []
enemy_fleet_dead_ships = []
for ships in fleet:
if fleet[ships]["health"] <= 0:
fleet_dead_ships.append(ships)
for ships in enemy_fleet:
if enemy_fleet[ships]["health"] <= 0:
enemy_fleet_dead_ships.append(ships)
for ship in fleet_dead_ships:
if "lock" in fleet[ship] and fleet[ship]["lock"]:
if fleet[ship]["hitbox"] == fleet["portal"]["hitbox"] :
if ship in fleet["portal"]["locked"]:
index = fleet["portal"]["locked"].index(ship)
del fleet["portal"]["locked"][index]
else:
for asteroid in asteroids:
if ship in asteroid["locked"]:
index = asteroid["locked"].index(ship)
del asteroid["locked"][index]
del fleet[ship]
for ship in enemy_fleet_dead_ships:
if "lock" in enemy_fleet[ship] and enemy_fleet[ship]["lock"]:
if enemy_fleet[ship]["hitbox"] == enemy_fleet["portal"]["hitbox"] :
if ship in enemy_fleet["portal"]["locked"]:
index = enemy_fleet["portal"]["locked"].index(ship)
del enemy_fleet["portal"]["locked"][index]
else:
for asteroid in asteroids:
if ship in asteroid["locked"]:
index = asteroid["locked"].index(ship)
del asteroid["locked"][index]
del enemy_fleet[ship]
return fleet,enemy_fleet
def harvest(redfleet,bluefleet,asteroids,store):
""" Resolve the harvesting of locked ships.
Parameters
----------
- redfleet: the fleet of the red player (dict)
- bluefleet: the fleet of the blue player (dict)
- asteroids: the list of asteroids (list)
- store: the data structure used to stock information on ships based on their types (dict)
Return
------
    - redfleet: the updated fleet of the red player (dict)
    - bluefleet: the updated fleet of the blue player (dict)
    - asteroids: the updated list of asteroids (list)
Version
-------
- specification: Marien Dessy (v.1 10/04/18)
- implementation: Simon Defrenne (v.1 04/04/18)
"""
for asteroid in asteroids:
if asteroid["locked"] != []:
red_lock = []
blue_lock = []
for ship in asteroid["locked"]:
if ship in redfleet:
red_lock.append(ship)
elif ship in bluefleet:
blue_lock.append(ship)
potential_harvest = len(asteroid["locked"]) * asteroid["harvest"]
if potential_harvest > asteroid["ore"]:
potential_harvest = asteroid["ore"]
ship_harvest = potential_harvest/len(asteroid["locked"])
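            # worked example (added for clarity): 3 locked ships with a
            # per-ship harvest of 4 on an asteroid with 10 ore left give
            # potential_harvest = min(3*4, 10) = 10 and ship_harvest = 10/3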
for ship in red_lock:
tonnage = store[redfleet[ship]["type"]]["tonnage"]
carried_weight = redfleet[ship]["tonnage"]
if tonnage - carried_weight < ship_harvest:
redfleet[ship]["tonnage"] += tonnage - carried_weight
asteroid["ore"] -= tonnage - carried_weight
else:
redfleet[ship]["tonnage"] += ship_harvest
asteroid["ore"] -= ship_harvest
for ship in blue_lock:
tonnage = store[bluefleet[ship]["type"]]["tonnage"]
carried_weight = bluefleet[ship]["tonnage"]
if tonnage - carried_weight < ship_harvest:
bluefleet[ship]["tonnage"] += tonnage - carried_weight
asteroid["ore"] -= tonnage - carried_weight
else:
bluefleet[ship]["tonnage"] += ship_harvest
asteroid["ore"] -= ship_harvest
for ship in redfleet["portal"]["locked"]:
redfleet["portal"]["ore"] += redfleet[ship]["tonnage"]
redfleet["portal"]["score"] += redfleet[ship]["tonnage"]
redfleet[ship]["tonnage"] -= redfleet[ship]["tonnage"]
for ship in bluefleet["portal"]["locked"]:
bluefleet["portal"]["ore"] += bluefleet[ship]["tonnage"]
bluefleet["portal"]["score"] += bluefleet[ship]["tonnage"]
bluefleet[ship]["tonnage"] -= bluefleet[ship]["tonnage"]
return redfleet,bluefleet,asteroids
def IA_buy(IA_fleet,enemy_fleet,store):
""" Make the IA buy a new ship and name it to add this ship in his fleet.
Parameters
----------
    - IA_fleet: the fleet of the IA (dict)
    - enemy_fleet: the fleet of the enemy player (dict)
    - store: the data structure used to store information on ships based on their types (dict)
    Return
    ------
    - order: the buy orders (str)
Version
-------
- specification: Marien Dessy (v.2 27/04/18)
- implementation: Marien Dessy, Simon Defrenne (v.4 04/05/18)
"""
ship_count = {}
ship_count["excavator-S"] = 0
ship_count["excavator-M"] = 0
ship_count["excavator-L"] = 0
ship_count["scout"] = 0
ship_count["warship"] = 0
for ship in IA_fleet:
if ship != "portal":
s_type = IA_fleet[ship]["type"]
ship_count[s_type] +=1
order = ""
buy = True
stock = IA_fleet["portal"]["ore"]
score = IA_fleet["portal"]["score"]
ship_to_buy = {}
while buy:
if ship_count["excavator-M"] < 2:
type_to_buy = "excavator-M"
ship_count["excavator-M"] += 1
stock -= 2
elif ship_count["scout"] < 2 and score >= 8 and stock >= 3:
type_to_buy = "scout"
ship_count["scout"] += 1
stock -= 3
elif score >= 16 and stock >=9:
type_to_buy = "warship"
stock -= 9
elif ship_count["excavator-S"] < 1 and score >= 8 and stock>=1:
type_to_buy = "excavator-S"
ship_count["excavator-S"] += 1
stock -= 1
elif ship_count["excavator-S"] < 2 and score >= 24 and stock>=1:
type_to_buy = "excavator-S"
ship_count["excavator-S"] += 1
stock -= 1
else:
buy = False
if buy:
name = name_ships(type_to_buy,IA_fleet)
while name in ship_to_buy:
name = name[0:2] + str(int(name[2:])+1)
ship_to_buy[name] = type_to_buy
for ship in ship_to_buy:
order += ship + ":" + ship_to_buy[ship] + " "
return order
def calculate_trajectory(test,objectives,choice=True):
""" Calculate the closest or furthest cases in a list from another defined case.
Parameters
----------
    - test: the case that is compared (list)
    - objectives: the list in which we look for the closest cases (list)
    - choice: True for the closest cases, False for the furthest (bool)
    Return
    ------
    - target: the list of closest (or furthest) possible cases (list)
Version
-------
    - specification: Simon Defrenne (v.1 27/04/18)
    - implementation: Simon Defrenne (v.1 27/04/18)
"""
target = []
possible_distance = {}
tested_distance = None
for objective in objectives:
tested_distance = calculate_turn_distance(test,objective)
if not tested_distance in possible_distance:
possible_distance[tested_distance] = []
possible_distance[tested_distance].append(objective)
possible_distance_2 = None
for distance in possible_distance:
if choice:
if possible_distance_2 == None or possible_distance_2 > distance:
possible_distance_2 = distance
else:
if possible_distance_2 == None or possible_distance_2 < distance:
possible_distance_2 = distance
target = possible_distance[possible_distance_2]
return target
def calculate_turn_distance(case_1,case_2):
""" Calculate the number of required turns to go between two case on the field.
Parameters
----------
- case_1 : a particular case on the field (list)
- case_2 : a particular case on the field (list)
Return
------
- distance: the distance between the two case (int)
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne (v.1 18/03/18)
"""
x1,y1 = coordinates(case_1)
x2,y2 = coordinates(case_2)
    distance = max(abs(x1 - x2), abs(y1 - y2))
return distance
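# Illustrative sketch (added for clarity, not part of the original file): the
# turn distance is the Chebyshev distance, because a ship may change its row
# and its column by one in the same turn. Assumes coordinates() unpacks a case.
def _demo_turn_distance():
    """Hypothetical usage example for calculate_turn_distance."""
    # from [2, 3] to [5, 1]: max(|2-5|, |3-1|) = 3 turns
    print(calculate_turn_distance([2, 3], [5, 1]))  # expected: 3
    # aligned cases only depend on the remaining axis
    print(calculate_turn_distance([4, 4], [4, 9]))  # expected: 5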
def overlay_trap(ship, IA_fleet,enemy_fleet,possible_moves,objectives):
""" Make a ship go into an opponent battleship
Parameters
----------
    - ship: the name of a ship not ordered yet (str)
    - IA_fleet: the fleet of the IA (dict)
    - enemy_fleet: the fleet of the opposing player (dict)
    - possible_moves: the moves the ship can make (list)
    - objectives: the objectives of the ship (list)
Return
------
- objectives : objectives of the ship (list)
Version
-------
- specification: Simon Defrenne (v.1 06/05/18)
- implementation: Simon Defrenne (v.1 06/05/18)
"""
battleships = []
for e_ship in enemy_fleet:
if enemy_fleet[e_ship]["type"] == "scout" or enemy_fleet[e_ship]["type"] == "warship":
battleships.append(e_ship)
for e_ship in battleships:
overlay = False
if IA_fleet[ship]["hitbox"] in hitbox(enemy_fleet[e_ship]):
if not overlay:
objectives = []
overlay = True
for hitbox_s in hitbox(enemy_fleet[e_ship]):
if hitbox_s in possible_moves:
objectives.append(hitbox_s)
elif not overlay:
objectives.append(enemy_fleet[e_ship]["hitbox"])
if objectives == []:
objectives.append(enemy_fleet["portal"]["hitbox"])
return objectives
def IA_move(ship_dict,IA_fleet,enemy_fleet,asteroids,store,mapsize):
""" Generate move orders for a ship of the IA.
Parameters
----------
    - ship_dict: the ships not ordered yet (dict)
- IA_fleet: the fleet of the IA (dict)
- enemy_fleet : the fleet of the opposing player (dict)
- asteroids : the list of asteroids (list)
- store: the data structure used to stock information on ships based on their types (dict)
- mapsize : list containing the number of rows and columns of the map (list)
Return
------
- order: the move order (str)
Version
-------
    - specification: Marien Dessy, Simon Defrenne (v.3 04/05/18)
    - implementation: Marien Dessy, Simon Defrenne (v.4 06/05/18)
"""
order = ""
for ship in ship_dict:
# set up
x_s,y_s = IA_fleet[ship]["hitbox"]
possible_moves = []
possible_moves.append([x_s+1,y_s])
possible_moves.append([x_s+1,y_s+1])
possible_moves.append([x_s,y_s+1])
possible_moves.append([x_s-1,y_s])
possible_moves.append([x_s,y_s-1])
possible_moves.append([x_s-1,y_s-1])
possible_moves.append([x_s+1,y_s-1])
possible_moves.append([x_s-1,y_s+1])
objectives = []
# calculating objectives of each ship
if IA_fleet[ship]["type"] == "excavator-M" or IA_fleet[ship]["type"] == "excavator-L":
if IA_fleet[ship]["tonnage"] == 0:
for asteroid in asteroids:
if asteroid["ore"] > 0:
objectives.append(asteroid["hitbox"])
elif IA_fleet[ship]["tonnage"] == store[IA_fleet[ship]["type"]]["tonnage"]:
objectives.append(IA_fleet["portal"]["hitbox"])
elif IA_fleet[ship]["tonnage"] < store[IA_fleet[ship]["type"]]["tonnage"]:
                tested_distance = 0
                total_distance = None
                for asteroid in asteroids:
                    if asteroid["ore"] > 0:
                        distance_a_s = calculate_turn_distance(asteroid["hitbox"],IA_fleet[ship]["hitbox"])
                        distance_a_p = calculate_turn_distance(asteroid["hitbox"],IA_fleet["portal"]["hitbox"])
                        tested_distance = distance_a_s + distance_a_p
                        if total_distance is None or tested_distance < total_distance:
                            total_distance = tested_distance
                            objectives = []
                            objectives.append(asteroid["hitbox"])
if objectives == []:
objectives = overlay_trap(ship, IA_fleet,enemy_fleet,possible_moves,objectives)
elif IA_fleet[ship]["type"] == "scout":
for e_ship in enemy_fleet:
if enemy_fleet[e_ship]["type"] == "excavator-M" or enemy_fleet[e_ship]["type"] == "excavator-L":
objectives.append(enemy_fleet[e_ship]["hitbox"])
if objectives == []:
x_p, y_p = enemy_fleet["portal"]["hitbox"]
                objectives.append([x_p + 3, y_p])
                objectives.append([x_p + 2, y_p + 1])
                objectives.append([x_p + 2, y_p - 1])
                objectives.append([x_p + 1, y_p + 2])
                objectives.append([x_p + 1, y_p - 2])
                objectives.append([x_p, y_p + 3])
                objectives.append([x_p, y_p - 3])
                objectives.append([x_p - 1, y_p + 2])
                objectives.append([x_p - 1, y_p - 2])
                objectives.append([x_p - 2, y_p + 1])
                objectives.append([x_p - 2, y_p - 1])
                objectives.append([x_p - 3, y_p])
elif IA_fleet[ship]["type"] == "warship":
objectives.append(enemy_fleet["portal"]["hitbox"])
elif IA_fleet[ship]["type"] == "excavator-S":
objectives = overlay_trap(ship, IA_fleet,enemy_fleet,possible_moves,objectives)
target = calculate_trajectory(IA_fleet[ship]["hitbox"],objectives)
target = random.choice(target)
possible_moves_2 = calculate_trajectory(target,possible_moves)
x_final,y_final = random.choice(possible_moves_2)
# correction of trajectory if needed
if x_final <= 0:
x_final += 2
elif x_final >= mapsize[0]:
x_final -= 2
if y_final <= 0:
y_final += 2
elif y_final >= mapsize[1]:
y_final -= 2
# adding the order the string
order += ship + ":@" + str(x_final) + "-" + str(y_final) + " "
# return the move order
return order
def target(ally_ship,enemy_fleet,store):
""" The Artificial Intelligence choose a target for one of its ships.
Parameters
----------
- ally_ship : the ship that is checked to choose a target (dict)
- enemy_fleet : the fleet of the enemy (dict)
- store: the data structure used to stock information on ships based on their types (dict)
Return
------
- target: the targeted case on which the ship shoots (list)
Version
-------
- specification: Simon Defrenne (v.1 09/03/18)
- implementation: Simon Defrenne, Marien Dessy (v.2 04/05/18)
"""
list_targets = []
target = []
s_range = store[ally_ship["type"]]["range"]
for ship in enemy_fleet:
distance = manhattan_distance(ally_ship["hitbox"],enemy_fleet[ship]["hitbox"])
center_check = distance <= s_range
type_check = enemy_fleet[ship]["type"] != "excavator-S"
if center_check and type_check:
list_targets.append(ship)
if list_targets != []:
health_test = None
for ship in list_targets:
if health_test == None or health_test < enemy_fleet[ship]["health"]:
health_test = enemy_fleet[ship]["health"]
target = enemy_fleet[ship]["hitbox"]
return target
return None
def IA_attack(ship_dict,IA_fleet,enemy_fleet,store):
""" The IA choose randomly a ship to attack.
Parameters
----------
    - ship_dict: the dictionary of ships not ordered yet (dict)
    - IA_fleet: the fleet of the IA (dict)
    - enemy_fleet: the fleet of the enemy player (dict)
    - store: the data structure used to store information on ships based on their types (dict)
    Return
    ------
    - order: the attack orders (str)
Version
-------
- specification: Marien Dessy (v.1 11/04/18)
- implementation: Marien Dessy, Simon Defrenne (v.1 11/04/18)
"""
order = ""
battleships = []
for ship in ship_dict:
if ship != "portal" and (not "tonnage" in IA_fleet[ship]):
battleships.append(ship)
for ship in battleships:
attacked_case = target(IA_fleet[ship],enemy_fleet,store)
if attacked_case != None and not(attacked_case in hitbox(IA_fleet[ship])):
order += str(ship + ":*" + str(attacked_case[0]) + "-" + str(attacked_case[1]) + " ")
return order
def IA_locking(ship_dict,IA_fleet,asteroids,store):
""" The IA choose randomly a ship to attack.
Parameters
----------
    - ship_dict: the dictionary of ships not ordered yet (dict)
    - IA_fleet: the fleet of the IA (dict)
    - asteroids: the list of asteroids (list)
    - store: the data structure used to store information on ships based on their types (dict)
    Return
    ------
    - order: the locking orders (str)
Version
-------
- specification: Marien Dessy (v.1 11/04/18)
- implementation: Marien Dessy (v.1 11/04/18)
"""
excavators = {}
for ship in ship_dict:
if "tonnage" in IA_fleet[ship]:
excavators[ship] = IA_fleet[ship]
locked_excavators = {}
unlocked_excavators = {}
for ship in excavators:
if excavators[ship]["lock"]:
locked_excavators[ship]=excavators[ship]
else:
unlocked_excavators[ship]=excavators[ship]
order = ""
for ship in locked_excavators:
s_type = locked_excavators[ship]["type"]
if locked_excavators[ship]["tonnage"] == 0 and locked_excavators[ship]["hitbox"] == IA_fleet["portal"]["hitbox"]:
order += ship + ":release "
else :
for asteroid in asteroids:
if ship in asteroid["locked"]:
if asteroid["ore"] <= 0 or locked_excavators[ship]["tonnage"] == store[s_type]["tonnage"]:
order += ship + ":release "
for ship in unlocked_excavators:
s_type = unlocked_excavators[ship]["type"]
for asteroid in asteroids:
ore_check = asteroid["ore"] > 0
hitbox_check = unlocked_excavators[ship]["hitbox"] == asteroid["hitbox"]
tonnage_check = excavators[ship]["tonnage"] < store[s_type]["tonnage"]
if ore_check and hitbox_check and tonnage_check:
order += ship + ":lock "
if unlocked_excavators[ship]["hitbox"] == IA_fleet["portal"]["hitbox"] and excavators[ship]["tonnage"] > 0:
order += ship + ":lock "
return order
def IA_complete(fleet,enemy_fleet,asteroids,store,mapsize):
""" Generate the orders for the IA.
Parameters
----------
- fleet: the fleet that will be used by the IA (dict)
- enemy_fleet: the fleet of the enemy (dict)
    - asteroids: the list of asteroids (list)
    - store: the data structure used to store information on ships based on their types (dict)
    - mapsize: list containing the number of rows and columns of the map (list)
    Return
    ------
    - order: the orders given by the IA (str)
Version
-------
- specification: Marien Dessy (v.1 12/04/18)
"""
order = ""
ship_dict = {}
for ship in fleet:
if ship != "portal":
ship_dict[ship] = None
order += IA_buy(fleet,enemy_fleet,store)
order += IA_locking(ship_dict,fleet,asteroids,store)
for ship in fleet:
if ship in order:
del ship_dict[ship]
order += IA_attack(ship_dict,fleet,enemy_fleet,store)
for ship in fleet:
if ship in order and ship in ship_dict:
del ship_dict[ship]
order += IA_move(ship_dict,fleet,enemy_fleet,asteroids,store,mapsize)
order = str.strip(order)
return order
# Gui framework
# ==============================================================================
# framework for easy user interface creation.
# Canvas creation and printing.
# ------------------------------------------------------------------------------
# Create and print a canvas in the user console.
def create_canvas(width, height, enable_color = True):
"""
Create a new char canvas.
Parameters
----------
height: height of the game view (int).
width: width of the game view (int).
enable_color: enable color in the game view (bool)
Return
------
canvas: 2D ascii canvas (dic).
"""
# Initialize the canvas.
canvas = {'size': (width, height), 'color': enable_color, 'grid': {}}
# Create canvas's tiles.
for x in range(width):
for y in range(height):
canvas['grid'][(x,y)] = {'color':None, 'back_color':None, 'char':' '}
return canvas
def print_canvas(canvas, x = 0, y = 0):
"""
Print canvas in the terminal.
Parameters
----------
canvas: canvas to print on screen (dic).
    (optional) x, y: coordinate in the terminal (int).
"""
canvas_width = canvas['size'][0]
canvas_height = canvas['size'][1]
# Hide and set cursor coordinates.
line = '\033[?25l'
for y in range(canvas_height):
for x in range(canvas_width):
# Get coordinate.
grid_item = canvas['grid'][(x,y)]
# Get coordinate information.
char = grid_item['char']
color = grid_item['color']
back_color = grid_item['back_color']
if (canvas['color']):
line = line + set_color(char, color, back_color)
else:
line = line + char
line += '\n'
    # Print, remove the last \n and reset the print cursor.
print(line[:-1] + '\033[?25h')
# Canvas drawing.
# ------------------------------------------------------------------------------
# All tools and brush to draw on the canvas.
def put(canvas, x, y, char, color = None, back_color = None):
"""
Put a character in the canvas.
Parameters
----------
canvas: canvas to draw in (dic).
    x, y: coordinate of where to put the char (int).
    char: char to put (str).
    (optional) color, back_color: color for the char (string).
Return
------
canvas: canvas with the char put on it (dic).
"""
# Check if the coordinate is in the bound of the canvas.
if x < canvas['size'][0] and x >= 0 and\
y < canvas['size'][1] and y >= 0:
        # Put the char at the coordinate.
canvas['grid'][(x,y)]['char'] = char
canvas['grid'][(x,y)]['color'] = color
canvas['grid'][(x,y)]['back_color'] = back_color
return canvas
def put_ship(canvas, x, y, char, color = None, back_color = None):
"""
Put function, but for ships.
Parameters
----------
canvas: canvas to draw in (dic).
    x, y: coordinate of where to put the char (int).
    char: char to put (str).
    (optional) color, back_color: color for the char (string).
Return
------
canvas: canvas with the char put on it (dic).
"""
x -= 1
y -= 1
# Check if the coordinate is in the bound of the canvas.
if x < canvas['size'][0] and x >= 0 and\
y < canvas['size'][1] and y >= 0:
        # Put the char at the coordinate.
canvas['grid'][(x,y)]['char'] = char
canvas['grid'][(x,y)]['color'] = color
canvas['grid'][(x,y)]['back_color'] = back_color
return canvas
def put_canvas(canvas, canvas_bis, x, y):
"""
Put a canvas in the canvas.
Parameters
----------
canvas: canvas to draw in (dic).
canvas_bis: canvas to put in the main canvas (dic).
x, y: coordinate of the canvas (int).
Return
------
canvas: the canvas with the other canvas on it (dic).
"""
for cx in range(canvas_bis['size'][0]):
for cy in range(canvas_bis['size'][1]):
char = canvas_bis['grid'][(cx, cy)]
canvas = put(canvas, cx + x, cy + y, char['char'], char['color'], char['back_color'])
return canvas
def put_window(canvas, window_content, title, x, y, style="double", color = None, back_color = None):
"""
Put a window with a windows content in the main canvas.
Parameters
----------
canvas: canvas to draw in (dic).
window_content: content of the window (dic).
title: title of the window (str).
x, y: coordinate of the window (int).
(optional) style: Style of the window (str).
Return
------
canvas: the canvas with the window on it (dic).
"""
c = create_canvas(window_content["size"][0] + 2, window_content["size"][1] + 2, True)
c = put_canvas(c, window_content, 1, 1)
c = put_box(c, 0, 0, window_content["size"][0] + 2, window_content["size"][1] + 2, style)
c = put_text(c, 1, 0, "| %s |" % title, color, back_color)
canvas = put_canvas(canvas, c, x, y)
return canvas
def put_box(canvas, x, y, width, height, mode = 'double', color = None, back_color = None):
"""
Put a box in the canvas.
Parameters
----------
canvas: canvas to draw in (dic).
x, y: coordinate of the rectangle (int).
width, height: size of the rectangle (int).
    mode: double or single line <'double'|'single'> (str).
color, back_color: color for the char (string).
Return
------
    canvas: canvas with the box (dic).
"""
rect_char = ()
if mode == 'double':
rect_char = ('═', '║', '╔', '╚', '╗', '╝')
elif mode == 'single':
rect_char = ('─', '│', '┌', '└', '┐', '┘')
# Put borders.
put_rectangle(canvas, x, y, width, 1, rect_char[0], color, back_color)
put_rectangle(canvas, x, y + height - 1, width, 1, rect_char[0], color, back_color)
put_rectangle(canvas, x, y, 1, height, rect_char[1], color, back_color)
put_rectangle(canvas, x + width - 1, y, 1, height, rect_char[1], color, back_color)
# Put corners.
put(canvas, x, y, rect_char[2], color, back_color)
put(canvas, x, y + height - 1, rect_char[3], color, back_color)
put(canvas, x + width - 1, y, rect_char[4], color, back_color)
put(canvas, x + width - 1, y + height - 1, rect_char[5], color, back_color)
return canvas
def put_rectangle(canvas, x, y, width, height, char, color = None, back_color = None):
"""
Put a filled rectangle in the canvas.
Parameters
----------
canvas: canvas to draw in (dic).
x, y: coordinate of the rectangle (int).
width, height: size of the rectangle (int).
color, back_color: color for the char (string).
Return
------
    canvas: canvas with the rectangle (dic).
"""
    for w in range(width):
        for h in range(height):
            canvas = put(canvas, x + w, y + h, char, color, back_color)
return canvas
def put_text(canvas, x, y, text, color = None, back_color = None):
"""
Put a text in the canvas.
Parameters
----------
    canvas: canvas to draw in (dic).
    x, y: coordinate of the string (int).
    text: string to draw (str).
    Return
    ------
    canvas: canvas with the new string (dic).
"""
for char in text:
canvas = put(canvas, x, y, char, color, back_color)
x += 1
y += 0
return canvas
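# Illustrative sketch (added for clarity, not part of the original file): how
# the canvas helpers above combine; the size and text are arbitrary values.
def _demo_canvas():
    """Hypothetical usage example for the canvas drawing helpers."""
    c = create_canvas(10, 3, enable_color=False)
    c = put_box(c, 0, 0, 10, 3, 'single')   # border around the whole canvas
    c = put_text(c, 1, 1, 'hello')          # text inside the border
    print_canvas(c)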
def set_color(text, foreground_color, background_color):
"""
Change the color of a text.
Parameters
----------
text: string to color (str).
    foreground_color: name of the foreground color (str).
    background_color: name of the background color (str).
Return
------
colored_text: colored string (str).
Notes
-----
Colors: grey, red, green, yellow, blue, magenta, cyan, white.
ANSI color escape sequences: http://ascii-table.com/ansi-escape-sequences.php
"""
color = { 'grey': 0, 'red': 1, 'green': 2, 'yellow': 3, 'blue': 4, 'magenta': 5, 'cyan': 6, 'white': 7 }
reset = '\033[0m'
format_string = '\033[%dm%s'
if foreground_color is not None: text = format_string % (color[foreground_color] + 30, text)
if background_color is not None: text = format_string % (color[background_color] + 40, text)
text += reset
return text
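# Illustrative sketch (added for clarity, not part of the original file):
# set_color wraps a string in ANSI escape sequences (30-37 foreground,
# 40-47 background), terminated by the reset code.
def _demo_set_color():
    """Hypothetical usage example for set_color."""
    colored = set_color("hit!", "red", None)
    # colored == '\033[31mhit!\033[0m' -> shown in red on ANSI terminals
    print(colored)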
def slide_animation(canvas_foreground, canvas_background):
"""
"""
out_canvas = create_canvas(canvas_background['size'][0], canvas_background['size'][1])
slide_value = 0
while slide_value <= canvas_background['size'][1]:
put_canvas(out_canvas, canvas_background, 0, 0)
put_canvas(out_canvas, canvas_foreground, 0, 0 - slide_value)
print_canvas(out_canvas)
slide_value += 10
def show_game(red_team, blue_team, asteroids, mapsize, store):
"""
    Show the UI for the game.
    Parameters
    ----------
    red_team: the fleet of the red player (dict)
    blue_team: the fleet of the blue player (dict)
    asteroids: the list of asteroids (list)
    mapsize: the number of rows and columns of the map (list)
    store: the data structure used to store information on ships based on their types (dict)
    Version
    -------
    specification: Alexis Losenko (v.1 06/05/18)
    implementation: Alexis Losenko (v.1 01/05/18)
"""
X = 0
Y = 1
tsize = shutil.get_terminal_size((90, 60))
tsize = (tsize[X]-1, tsize[Y]-1)
c = create_canvas(tsize[X] , tsize[Y])
game_window = create_canvas(*mapsize)
shop_window = create_canvas(20, len(store) * 2)
red_window = render_team_window(red_team)
blue_window = render_team_window(blue_team)
for asteroid in asteroids:
put_ship(game_window, *asteroid["hitbox"], "o", "magenta")
#Now, it's the position of Portal
for h in hitbox(blue_team["portal"]):
put_ship(game_window, *h, "◘", "blue")
for h in hitbox(red_team["portal"]):
put_ship(game_window, *h, "◘", "red")
#go to show every ship
for ship in red_team:
if ship != "portal":
for h in hitbox(red_team[ship]):
put_ship(game_window, *h, "■", "red")
put_ship(game_window, *red_team[ship]["hitbox"], "X", "red")
for ship in blue_team:
if ship != "portal":
for h in hitbox(blue_team[ship]):
put_ship(game_window, *h, "■", "blue")
put_ship(game_window, *blue_team[ship]["hitbox"], "X", "blue")
line = 0
for type in store:
name = type
type = store[type]
put_text(shop_window, 0, line * 2, name, "yellow")
if "excavator" in name:
put_text(shop_window, 0, line * 2+1, " P:%i T:%i" % (type["cost"], type["tonnage"]))
else:
put_text(shop_window, 0, line * 2+1, " P:%i A:%i" % (type["cost"], type["attack"]))
line += 1
origin = (tsize[X] // 2 - game_window["size"][X] // 2, tsize[Y] // 2 - game_window["size"][Y] // 2)
put_window(c, game_window, "Mining War", *origin)
put_window(c, shop_window, "Shop", origin[X] - red_window["size"][X] - 2, origin[Y] + red_window["size"][Y] + 2)
put_window(c, red_window, "Red", origin[X] - red_window["size"][X] - 2, origin[Y], "double", "red")
put_window(c, blue_window, "Blue", origin[X] + game_window["size"][X] + 2, origin[Y], "double", "blue")
print_canvas(c)
def render_team_window(team):
"""
    Render the text window listing each ship of a team with its details.
    Parameters
    ----------
    team: the fleet of a player (dict)
    Return
    ------
    team_window: the rendered team window (dic)
"""
X = 0
Y = 1
team_window = create_canvas(20, 20)
line = 0
for ship in team:
name = ship
if ship != "portal":
ship = team[ship]
position = ship["hitbox"]
if ( "excavator" in ship["type"]):
put_text(team_window, 0, line, "%s %i-%i T:%i H:%i" % (name, position[X] + 1, position[Y] + 1, ship["tonnage"], ship["health"]))
else:
put_text(team_window, 0, line, "%s %i-%i H:%i" % (name, position[X] + 1, position[Y] + 1, ship["health"]))
line += 1
return team_window
def get_IP():
"""Returns the IP of the computer where get_IP is called.
Returns
-------
computer_IP: IP of the computer where get_IP is called (str)
Notes
-----
If you have no internet connection, your IP will be 127.0.0.1.
This IP address refers to the local host, i.e. your computer.
"""
return socket.gethostbyname(socket.gethostname())
def connect_to_player(player_id, remote_IP='127.0.0.1', verbose=False):
"""Initialise communication with remote player.
Parameters
----------
player_id: player id of the remote player, 1 or 2 (int)
remote_IP: IP of the computer where remote player is (str, optional)
verbose: True only if connection progress must be displayed (bool, optional)
Returns
-------
connection: sockets to receive/send orders (tuple)
Notes
-----
Initialisation can take several seconds. The function only
returns after connection has been initialised by both players.
Use the default value of remote_IP if the remote player is running on
the same machine. Otherwise, indicate the IP where the other player
is running with remote_IP. On most systems, the IP of a computer
can be obtained by calling the get_IP function on that computer.
"""
# init verbose display
if verbose:
print('\n-------------------------------------------------------------')
# open socket (as server) to receive orders
socket_in = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket_in.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # deal with a socket in TIME_WAIT state
if remote_IP == '127.0.0.1':
local_IP = '127.0.0.1'
else:
local_IP = get_IP()
local_port = 42000 + (3-player_id)
try:
if verbose:
print('binding on %s:%d to receive orders from player %d...' % (local_IP, local_port, player_id))
socket_in.bind((local_IP, local_port))
except:
local_port = 42000 + 100+ (3-player_id)
if verbose:
print(' referee detected, binding instead on %s:%d...' % (local_IP, local_port))
socket_in.bind((local_IP, local_port))
socket_in.listen(1)
if verbose:
print(' done -> now waiting for a connection on %s:%d\n' % (local_IP, local_port))
# open client socket used to send orders
socket_out = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket_out.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # deal with a socket in TIME_WAIT state
remote_port = 42000 + player_id
connected = False
msg_shown = False
while not connected:
try:
if verbose and not msg_shown:
print('connecting on %s:%d to send orders to player %d...' % (remote_IP, remote_port, player_id))
socket_out.connect((remote_IP, remote_port))
connected = True
if verbose:
print(' done -> now sending orders to player %d on %s:%d' % (player_id, remote_IP, remote_port))
except:
if verbose and not msg_shown:
print(' connection failed -> will try again every 100 msec...')
time.sleep(.1)
msg_shown = True
if verbose:
print()
# accept connection to the server socket to receive orders from remote player
socket_in, remote_address = socket_in.accept()
if verbose:
print('now listening to orders from player %d' % (player_id))
# end verbose display
if verbose:
print('\nconnection to remote player %d successful\n-------------------------------------------------------------\n' % player_id)
# return sockets for further use
return (socket_in, socket_out)
def disconnect_from_player(connection):
"""End communication with remote player.
Parameters
----------
connection: sockets to receive/send orders (tuple)
"""
# get sockets
socket_in = connection[0]
socket_out = connection[1]
# shutdown sockets
socket_in.shutdown(socket.SHUT_RDWR)
socket_out.shutdown(socket.SHUT_RDWR)
# close sockets
socket_in.close()
socket_out.close()
def notify_remote_orders(connection, orders):
"""Notifies orders of the local player to a remote player.
Parameters
----------
connection: sockets to receive/send orders (tuple)
orders: orders of the local player (str)
Raises
------
IOError: if remote player cannot be reached
"""
# get sockets
socket_in = connection[0]
socket_out = connection[1]
# deal with null orders (empty string)
if orders == '':
orders = 'null'
# send orders
try:
socket_out.sendall(orders.encode())
except:
raise IOError('remote player cannot be reached')
def get_remote_orders(connection):
"""Returns orders from a remote player.
Parameters
----------
connection: sockets to receive/send orders (tuple)
Returns
----------
player_orders: orders given by remote player (str)
Raises
------
IOError: if remote player cannot be reached
"""
# get sockets
socket_in = connection[0]
socket_out = connection[1]
# receive orders
try:
orders = socket_in.recv(65536).decode()
except:
raise IOError('remote player cannot be reached')
# deal with null orders
if orders == 'null':
orders = ''
return orders
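# Illustrative sketch (added for clarity, not part of the original file): one
# turn of the order exchange, assuming both programs already called
# connect_to_player with matching player ids and IPs.
def _demo_order_exchange(connection, my_orders):
    """Hypothetical round-trip: send local orders, then wait for remote ones."""
    notify_remote_orders(connection, my_orders)  # '' is sent as 'null'
    return get_remote_orders(connection)         # 'null' comes back as ''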
def game(path, player_id, remote_IP='127.0.0.1', verbose=False):
""" Run the game.
Parameters
----------
    path: the file of the map (str)
    player_id: id of the local player, 1 or 2 (int)
    remote_IP: IP of the computer where the remote player is (str, optional)
    verbose: True only if connection progress must be displayed (bool, optional)
Version
-------
specification: Simon Defrenne (v.1 20/04/18)
implementation: Simon Defrenne, Marien Dessy, Alexis Losenko (v.1 20-04-18)
"""
redfleet,bluefleet,asteroids,store,mapsize = game_preparation(path)
turn_count = 0
game = True
    no_damage_count = 0
connection = connect_to_player(player_id, remote_IP, verbose)
while game:
redfleet_health_data = {}
for ship in redfleet:
redfleet_health_data[ship] = redfleet[ship]["health"]
bluefleet_health_data = {}
for ship in bluefleet:
bluefleet_health_data[ship] = bluefleet[ship]["health"]
if player_id == 2:
player_red = IA_complete(redfleet,bluefleet,asteroids,store,mapsize)
notify_remote_orders(connection,player_red)
player_blue = get_remote_orders(connection)
elif player_id == 1:
player_blue = IA_complete(bluefleet,redfleet,asteroids,store,mapsize)
player_red = get_remote_orders(connection)
notify_remote_orders(connection,player_blue)
redfleet, bluefleet, asteroids = turn(player_red, player_blue, redfleet, bluefleet, store, asteroids,mapsize)
turn_count += 1
show_game(redfleet, bluefleet, asteroids, mapsize, store)
        # check if any ship has been damaged this turn
        damage_check = False
        for ship in redfleet:
            if ship in redfleet_health_data:
                if redfleet_health_data[ship] != redfleet[ship]["health"]:
                    damage_check = True
        for ship in bluefleet:
            if ship in bluefleet_health_data:
                if bluefleet_health_data[ship] != bluefleet[ship]["health"]:
                    damage_check = True
        if damage_check:
            no_damage_count = 0
        else:
            no_damage_count += 1
# win condition check
if not "portal" in redfleet:
game = False
print("Red player wins.")
disconnect_from_player(connection)
elif not "portal" in bluefleet:
print("Blue player wins.")
game = False
disconnect_from_player(connection)
elif no_damage_count >= 200:
if redfleet["portal"]["health"] > bluefleet["portal"]["health"]:
print("Red player wins.")
elif redfleet["portal"]["health"] < bluefleet["portal"]["health"]:
print("Blue player wins.")
else:
if redfleet["portal"]["score"] > bluefleet["portal"]["score"]:
print("Red player wins.")
elif redfleet["portal"]["score"] < bluefleet["portal"]["score"]:
print("Blue player wins.")
else:
print("DRAW")
game = False
disconnect_from_player(connection)
time.sleep(0.5)
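# Illustrative sketch (added for clarity, not part of the original file): a
# possible entry point; the map file name and the argument values are
# hypothetical.
#
#     game('map.txt', player_id=1, remote_IP='127.0.0.1', verbose=True)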
| mit |
allen-fdes/python_demo | cookbook/settings.py | 1 | 2691 | """
Django settings for cookbook project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '547bugh8va($^u5aq+wo12ua0ll_k!rnxj!$)wf*uj+jo$vl+w'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'polls',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'cookbook.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'cookbook.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
| mit |
bpgc-cte/python2017 | Week 7/django/lib/python3.6/site-packages/pip/_vendor/cachecontrol/serialize.py | 326 | 6536 | import base64
import io
import json
import zlib
from pip._vendor.requests.structures import CaseInsensitiveDict
from .compat import HTTPResponse, pickle, text_type
def _b64_encode_bytes(b):
return base64.b64encode(b).decode("ascii")
def _b64_encode_str(s):
return _b64_encode_bytes(s.encode("utf8"))
def _b64_encode(s):
if isinstance(s, text_type):
return _b64_encode_str(s)
return _b64_encode_bytes(s)
def _b64_decode_bytes(b):
return base64.b64decode(b.encode("ascii"))
def _b64_decode_str(s):
return _b64_decode_bytes(s).decode("utf8")
class Serializer(object):
def dumps(self, request, response, body=None):
response_headers = CaseInsensitiveDict(response.headers)
if body is None:
body = response.read(decode_content=False)
# NOTE: 99% sure this is dead code. I'm only leaving it
# here b/c I don't have a test yet to prove
# it. Basically, before using
# `cachecontrol.filewrapper.CallbackFileWrapper`,
# this made an effort to reset the file handle. The
# `CallbackFileWrapper` short circuits this code by
# setting the body as the content is consumed, the
# result being a `body` argument is *always* passed
# into cache_response, and in turn,
# `Serializer.dump`.
response._fp = io.BytesIO(body)
data = {
"response": {
"body": _b64_encode_bytes(body),
"headers": dict(
(_b64_encode(k), _b64_encode(v))
for k, v in response.headers.items()
),
"status": response.status,
"version": response.version,
"reason": _b64_encode_str(response.reason),
"strict": response.strict,
"decode_content": response.decode_content,
},
}
# Construct our vary headers
data["vary"] = {}
if "vary" in response_headers:
varied_headers = response_headers['vary'].split(',')
for header in varied_headers:
header = header.strip()
data["vary"][header] = request.headers.get(header, None)
# Encode our Vary headers to ensure they can be serialized as JSON
data["vary"] = dict(
(_b64_encode(k), _b64_encode(v) if v is not None else v)
for k, v in data["vary"].items()
)
return b",".join([
b"cc=2",
zlib.compress(
json.dumps(
data, separators=(",", ":"), sort_keys=True,
).encode("utf8"),
),
])
def loads(self, request, data):
# Short circuit if we've been given an empty set of data
if not data:
return
# Determine what version of the serializer the data was serialized
# with
try:
ver, data = data.split(b",", 1)
except ValueError:
ver = b"cc=0"
# Make sure that our "ver" is actually a version and isn't a false
# positive from a , being in the data stream.
if ver[:3] != b"cc=":
data = ver + data
ver = b"cc=0"
# Get the version number out of the cc=N
ver = ver.split(b"=", 1)[-1].decode("ascii")
# Dispatch to the actual load method for the given version
try:
return getattr(self, "_loads_v{0}".format(ver))(request, data)
except AttributeError:
# This is a version we don't have a loads function for, so we'll
# just treat it as a miss and return None
return
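    # Illustrative note (added for clarity): a v2 record produced by dumps()
    # looks like b"cc=2," + zlib.compress(json_payload), so loads() above
    # strips the "cc=N" prefix and dispatches to _loads_v2 below.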
def prepare_response(self, request, cached):
"""Verify our vary headers match and construct a real urllib3
HTTPResponse object.
"""
# Special case the '*' Vary value as it means we cannot actually
# determine if the cached response is suitable for this request.
if "*" in cached.get("vary", {}):
return
# Ensure that the Vary headers for the cached response match our
# request
for header, value in cached.get("vary", {}).items():
if request.headers.get(header, None) != value:
return
body_raw = cached["response"].pop("body")
headers = CaseInsensitiveDict(data=cached['response']['headers'])
if headers.get('transfer-encoding', '') == 'chunked':
headers.pop('transfer-encoding')
cached['response']['headers'] = headers
try:
body = io.BytesIO(body_raw)
except TypeError:
# This can happen if cachecontrol serialized to v1 format (pickle)
# using Python 2. A Python 2 str(byte string) will be unpickled as
# a Python 3 str (unicode string), which will cause the above to
# fail with:
#
# TypeError: 'str' does not support the buffer interface
body = io.BytesIO(body_raw.encode('utf8'))
return HTTPResponse(
body=body,
preload_content=False,
**cached["response"]
)
def _loads_v0(self, request, data):
# The original legacy cache data. This doesn't contain enough
# information to construct everything we need, so we'll treat this as
# a miss.
return
def _loads_v1(self, request, data):
try:
cached = pickle.loads(data)
except ValueError:
return
return self.prepare_response(request, cached)
def _loads_v2(self, request, data):
try:
cached = json.loads(zlib.decompress(data).decode("utf8"))
except ValueError:
return
# We need to decode the items that we've base64 encoded
cached["response"]["body"] = _b64_decode_bytes(
cached["response"]["body"]
)
cached["response"]["headers"] = dict(
(_b64_decode_str(k), _b64_decode_str(v))
for k, v in cached["response"]["headers"].items()
)
cached["response"]["reason"] = _b64_decode_str(
cached["response"]["reason"],
)
cached["vary"] = dict(
(_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
for k, v in cached["vary"].items()
)
return self.prepare_response(request, cached)
| mit |
abaditsegay/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/heapq.py | 49 | 15994 | # -*- coding: Latin-1 -*-
"""Heap queue algorithm (a.k.a. priority queue).
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
all k, counting elements from 0. For the sake of comparison,
non-existing elements are considered to be infinite. The interesting
property of a heap is that a[0] is always its smallest element.
Usage:
heap = [] # creates an empty heap
heappush(heap, item) # pushes a new item on the heap
item = heappop(heap) # pops the smallest item from the heap
item = heap[0] # smallest item on the heap without popping it
heapify(x) # transforms list into a heap, in-place, in linear time
item = heapreplace(heap, item) # pops and returns smallest item, and adds
# new item; the heap size is unchanged
Our API differs from textbook heap algorithms as follows:
- We use 0-based indexing. This makes the relationship between the
index for a node and the indexes for its children slightly less
obvious, but is more suitable since Python uses 0-based indexing.
- Our heappop() method returns the smallest item, not the largest.
These two make it possible to view the heap as a regular Python list
without surprises: heap[0] is the smallest item, and heap.sort()
maintains the heap invariant!
"""
# Original code by Kevin O'Connor, augmented by Tim Peters and Raymond Hettinger
__about__ = """Heap queues
[explanation by François Pinard]
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
all k, counting elements from 0. For the sake of comparison,
non-existing elements are considered to be infinite. The interesting
property of a heap is that a[0] is always its smallest element.
The strange invariant above is meant to be an efficient memory
representation for a tournament. The numbers below are `k', not a[k]:
0
1 2
3 4 5 6
7 8 9 10 11 12 13 14
15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
In the tree above, each cell `k' is topping `2*k+1' and `2*k+2'. In
a usual binary tournament we see in sports, each cell is the winner
over the two cells it tops, and we can trace the winner down the tree
to see all opponents s/he had. However, in many computer applications
of such tournaments, we do not need to trace the history of a winner.
To be more memory efficient, when a winner is promoted, we try to
replace it by something else at a lower level, and the rule becomes
that a cell and the two cells it tops contain three different items,
but the top cell "wins" over the two topped cells.
If this heap invariant is protected at all time, index 0 is clearly
the overall winner. The simplest algorithmic way to remove it and
find the "next" winner is to move some loser (let's say cell 30 in the
diagram above) into the 0 position, and then percolate this new 0 down
the tree, exchanging values, until the invariant is re-established.
This is clearly logarithmic on the total number of items in the tree.
By iterating over all items, you get an O(n ln n) sort.
A nice feature of this sort is that you can efficiently insert new
items while the sort is going on, provided that the inserted items are
not "better" than the last 0'th element you extracted. This is
especially useful in simulation contexts, where the tree holds all
incoming events, and the "win" condition means the smallest scheduled
time. When an event schedules other events for execution, they are
scheduled into the future, so they can easily go into the heap. So, a
heap is a good structure for implementing schedulers (this is what I
used for my MIDI sequencer :-).
Various structures for implementing schedulers have been extensively
studied, and heaps are good for this, as they are reasonably speedy,
the speed is almost constant, and the worst case is not much different
than the average case. However, there are other representations which
are more efficient overall, yet the worst cases might be terrible.
Heaps are also very useful in big disk sorts. You most probably all
know that a big sort implies producing "runs" (which are pre-sorted
sequences, whose size is usually related to the amount of CPU memory),
followed by merging passes for these runs, which merging is often
very cleverly organised[1]. It is very important that the initial
sort produces the longest runs possible. Tournaments are a good way
to that. If, using all the memory available to hold a tournament, you
replace and percolate items that happen to fit the current run, you'll
produce runs which are twice the size of the memory for random input,
and much better for input fuzzily ordered.
Moreover, if you output the 0'th item on disk and get an input which
may not fit in the current tournament (because the value "wins" over
the last output value), it cannot fit in the heap, so the size of the
heap decreases. The freed memory could be cleverly reused immediately
for progressively building a second heap, which grows at exactly the
same rate the first heap is melting. When the first heap completely
vanishes, you switch heaps and start a new run. Clever and quite
effective!
In a word, heaps are useful memory structures to know. I use them in
a few applications, and I think it is good to keep a `heap' module
around. :-)
--------------------
[1] The disk balancing algorithms which are current, nowadays, are
more annoying than clever, and this is a consequence of the seeking
capabilities of the disks. On devices which cannot seek, like big
tape drives, the story was quite different, and one had to be very
clever to ensure (far in advance) that each tape movement will be the
most effective possible (that is, will best participate at
"progressing" the merge). Some tapes were even able to read
backwards, and this was also used to avoid the rewinding time.
Believe me, real good tape sorts were quite spectacular to watch!
From all times, sorting has always been a Great Art! :-)
"""
__all__ = ['heappush', 'heappop', 'heapify', 'heapreplace', 'merge',
'nlargest', 'nsmallest', 'heappushpop']
from itertools import islice, repeat, count, imap, izip, tee
from operator import itemgetter, neg
import bisect
def heappush(heap, item):
"""Push item onto heap, maintaining the heap invariant."""
heap.append(item)
_siftdown(heap, 0, len(heap)-1)
def heappop(heap):
"""Pop the smallest item off the heap, maintaining the heap invariant."""
lastelt = heap.pop() # raises appropriate IndexError if heap is empty
if heap:
returnitem = heap[0]
heap[0] = lastelt
_siftup(heap, 0)
else:
returnitem = lastelt
return returnitem
def heapreplace(heap, item):
"""Pop and return the current smallest value, and add the new item.
This is more efficient than heappop() followed by heappush(), and can be
more appropriate when using a fixed-size heap. Note that the value
returned may be larger than item! That constrains reasonable uses of
this routine unless written as part of a conditional replacement:
if item > heap[0]:
item = heapreplace(heap, item)
"""
returnitem = heap[0] # raises appropriate IndexError if heap is empty
heap[0] = item
_siftup(heap, 0)
return returnitem
def heappushpop(heap, item):
"""Fast version of a heappush followed by a heappop."""
if heap and heap[0] < item:
item, heap[0] = heap[0], item
_siftup(heap, 0)
return item
def heapify(x):
"""Transform list into a heap, in-place, in O(len(heap)) time."""
n = len(x)
# Transform bottom-up. The largest index there's any point to looking at
# is the largest with a child index in-range, so must have 2*i + 1 < n,
# or i < (n-1)/2. If n is even = 2*j, this is (2*j-1)/2 = j-1/2 so
# j-1 is the largest, which is n//2 - 1. If n is odd = 2*j+1, this is
# (2*j+1-1)/2 = j so j-1 is the largest, and that's again n//2-1.
for i in reversed(xrange(n//2)):
_siftup(x, i)
def nlargest(n, iterable):
"""Find the n largest elements in a dataset.
Equivalent to: sorted(iterable, reverse=True)[:n]
"""
it = iter(iterable)
result = list(islice(it, n))
if not result:
return result
heapify(result)
_heappushpop = heappushpop
for elem in it:
heappushpop(result, elem)
result.sort(reverse=True)
return result
def nsmallest(n, iterable):
"""Find the n smallest elements in a dataset.
Equivalent to: sorted(iterable)[:n]
"""
if hasattr(iterable, '__len__') and n * 10 <= len(iterable):
# For smaller values of n, the bisect method is faster than a minheap.
# It is also memory efficient, consuming only n elements of space.
it = iter(iterable)
result = sorted(islice(it, 0, n))
if not result:
return result
insort = bisect.insort
pop = result.pop
los = result[-1] # los --> Largest of the nsmallest
for elem in it:
if los <= elem:
continue
insort(result, elem)
pop()
los = result[-1]
return result
# An alternative approach manifests the whole iterable in memory but
# saves comparisons by heapifying all at once. Also, saves time
# over bisect.insort() which has O(n) data movement time for every
# insertion. Finding the n smallest of an m length iterable requires
# O(m) + O(n log m) comparisons.
h = list(iterable)
heapify(h)
return map(heappop, repeat(h, min(n, len(h))))
# 'heap' is a heap at all indices >= startpos, except possibly for pos. pos
# is the index of a leaf with a possibly out-of-order value. Restore the
# heap invariant.
def _siftdown(heap, startpos, pos):
newitem = heap[pos]
# Follow the path to the root, moving parents down until finding a place
# newitem fits.
while pos > startpos:
parentpos = (pos - 1) >> 1
parent = heap[parentpos]
if newitem < parent:
heap[pos] = parent
pos = parentpos
continue
break
heap[pos] = newitem
# The child indices of heap index pos are already heaps, and we want to make
# a heap at index pos too. We do this by bubbling the smaller child of
# pos up (and so on with that child's children, etc) until hitting a leaf,
# then using _siftdown to move the oddball originally at index pos into place.
#
# We *could* break out of the loop as soon as we find a pos where newitem <=
# both its children, but turns out that's not a good idea, and despite that
# many books write the algorithm that way. During a heap pop, the last array
# element is sifted in, and that tends to be large, so that comparing it
# against values starting from the root usually doesn't pay (= usually doesn't
# get us out of the loop early). See Knuth, Volume 3, where this is
# explained and quantified in an exercise.
#
# Cutting the # of comparisons is important, since these routines have no
# way to extract "the priority" from an array element, so that intelligence
# is likely to be hiding in custom __cmp__ methods, or in array elements
# storing (priority, record) tuples. Comparisons are thus potentially
# expensive.
#
# On random arrays of length 1000, making this change cut the number of
# comparisons made by heapify() a little, and those made by exhaustive
# heappop() a lot, in accord with theory. Here are typical results from 3
# runs (3 just to demonstrate how small the variance is):
#
# Compares needed by heapify Compares needed by 1000 heappops
# -------------------------- --------------------------------
# 1837 cut to 1663 14996 cut to 8680
# 1855 cut to 1659 14966 cut to 8678
# 1847 cut to 1660 15024 cut to 8703
#
# Building the heap by using heappush() 1000 times instead required
# 2198, 2148, and 2219 compares: heapify() is more efficient, when
# you can use it.
#
# The total compares needed by list.sort() on the same lists were 8627,
# 8627, and 8632 (this should be compared to the sum of heapify() and
# heappop() compares): list.sort() is (unsurprisingly!) more efficient
# for sorting.
def _siftup(heap, pos):
endpos = len(heap)
startpos = pos
newitem = heap[pos]
# Bubble up the smaller child until hitting a leaf.
childpos = 2*pos + 1 # leftmost child position
while childpos < endpos:
# Set childpos to index of smaller child.
rightpos = childpos + 1
if rightpos < endpos and not heap[childpos] < heap[rightpos]:
childpos = rightpos
# Move the smaller child up.
heap[pos] = heap[childpos]
pos = childpos
childpos = 2*pos + 1
# The leaf at pos is empty now. Put newitem there, and bubble it up
# to its final resting place (by sifting its parents down).
heap[pos] = newitem
_siftdown(heap, startpos, pos)
# If available, use C implementation
try:
from _heapq import heappush, heappop, heapify, heapreplace, nlargest, nsmallest, heappushpop
except ImportError:
pass
def merge(*iterables):
'''Merge multiple sorted inputs into a single sorted output.
Similar to sorted(itertools.chain(*iterables)) but returns a generator,
does not pull the data into memory all at once, and assumes that each of
the input streams is already sorted (smallest to largest).
>>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25]))
[0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25]
'''
_heappop, _heapreplace, _StopIteration = heappop, heapreplace, StopIteration
h = []
h_append = h.append
for itnum, it in enumerate(map(iter, iterables)):
try:
next = it.next
h_append([next(), itnum, next])
except _StopIteration:
pass
heapify(h)
while 1:
try:
while 1:
v, itnum, next = s = h[0] # raises IndexError when h is empty
yield v
s[0] = next() # raises StopIteration when exhausted
_heapreplace(h, s) # restore heap condition
except _StopIteration:
_heappop(h) # remove empty iterator
except IndexError:
return
# Extend the implementations of nsmallest and nlargest to use a key= argument
_nsmallest = nsmallest
def nsmallest(n, iterable, key=None):
"""Find the n smallest elements in a dataset.
Equivalent to: sorted(iterable, key=key)[:n]
"""
if key is None:
it = izip(iterable, count()) # decorate
result = _nsmallest(n, it)
return map(itemgetter(0), result) # undecorate
in1, in2 = tee(iterable)
it = izip(imap(key, in1), count(), in2) # decorate
result = _nsmallest(n, it)
return map(itemgetter(2), result) # undecorate
_nlargest = nlargest
def nlargest(n, iterable, key=None):
"""Find the n largest elements in a dataset.
Equivalent to: sorted(iterable, key=key, reverse=True)[:n]
"""
if key is None:
it = izip(iterable, imap(neg, count())) # decorate
result = _nlargest(n, it)
return map(itemgetter(0), result) # undecorate
in1, in2 = tee(iterable)
it = izip(imap(key, in1), imap(neg, count()), in2) # decorate
result = _nlargest(n, it)
return map(itemgetter(2), result) # undecorate
if __name__ == "__main__":
# Simple sanity test
heap = []
data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 0]
for item in data:
heappush(heap, item)
sort = []
while heap:
sort.append(heappop(heap))
print sort
import doctest
doctest.testmod()
| apache-2.0 |
Aderemi/Artificial-Intelligence | search_algo.py | 1 | 8943 | from collections import deque
from math import sqrt
import sys
import copy
import bisect
class Problem:
def __init__(self, initial_board, goal_board):
self.initial_board = Board(initial_board)
self.goal_board = Board(goal_board)
def actions(self, board):
#UDLR
possible_moves = [["Down", "Right"],
["Down", "Left", "Right"],
["Left", "Down"],
["Up", "Down", "Right"],
["Up", "Down", "Left", "Right"],
["Up", "Down", "Left"],
["Up", "Right"],
["Up", "Left", "Right"],
["Up", "Left"]]
return possible_moves[board.state.index("0")]
def result(self, board, action):
if action == "Left":
return board.move("Left")
elif action == "Right":
return board.move("Right")
elif action == "Down":
return board.move("Down")
elif action == "Up":
return board.move("Up")
def goalTest(self, board):
return board == self.goal_board
def pathCost(self, cost, board_now, action, next_board):
return cost + 1
class Board:
def __init__(self, state, parent = None, action = None, path_cost = 0):
self.state = copy.copy(state)
self.parent = parent
self.action = action
self.path_cost = path_cost
self.depth = 0
if parent:
self.depth = parent.depth + 1
def __eq__(self, other):
return isinstance(other, Board) and self.state == other.state
def __str__(self):
return "<| Board Items: {} |>".format(self.state)
def __lt__(self, node):
return self.path_cost < node.path_cost
def __hash__(self):
return hash((",").join(self.state))
def swapStateContent(self, empty_pos, new_pos):
new_pos_holder = self.state[new_pos]
self.state[new_pos] = "0"
self.state[empty_pos] = new_pos_holder
return self
def move(self, direction):
empty_pos = self.state.index("0")
up_down_gauge = int(sqrt(len(self.state)))
if direction == "Left":
new_pos = empty_pos - 1
return self.swapStateContent(empty_pos, new_pos)
elif direction == "Right":
new_pos = empty_pos + 1
return self.swapStateContent(empty_pos, new_pos)
elif direction == "Up":
new_pos = empty_pos - up_down_gauge
return self.swapStateContent(empty_pos, new_pos)
elif direction == "Down":
new_pos = empty_pos + up_down_gauge
return self.swapStateContent(empty_pos, new_pos)
def expand(self, problem):
m_list = set()
for action in problem.actions(self):
child = self.childBoard(problem, action)
m_list.add(child)
#print(child.state, action)
return m_list
def childBoard(self, problem, action):
my_copy = Board(self.state, self.parent, self.action, self.path_cost)
next_board = problem.result(my_copy, action)
return Board(next_board.state, self, action, problem.pathCost(self.path_cost, self.state, action, next_board.state))
    def traceBack(self):
        #Walk the parent links back to the root board; the root itself is
        #excluded (its action is None) and the list is reversed so the
        #actions read start-to-goal.
        board, parent_n_granies = self, []
        while board.parent:
            parent_n_granies.append(board)
            board = board.parent
        parent_n_granies.reverse()
        return parent_n_granies
def solution(self, string = False):
solution_actions = [board.action for board in self.traceBack()]
return ",".join(solution_actions) if string else solution_actions
class QueueType:
    def __init__(self, items=(), length=None):
        #An empty tuple avoids the shared mutable-default pitfall;
        #deque accepts any iterable.
        self.Queue = deque(items, length)
def __len__(self):
return len(self.Queue)
def __contains__(self, item):
return item in self.Queue
def pop(self):
if len(self.Queue) > 0:
return self.Queue.popleft()
else :
raise Exception('Queue is empty')
    def addItem(self, item):
        #A deque created with maxlen=None is unbounded, so only enforce
        #the limit when one was actually set:
        if self.Queue.maxlen is None or len(self.Queue) < self.Queue.maxlen:
            self.Queue.append(item)
        else:
            raise Exception('Queue is full')
    def addItems(self, items):
        if self.Queue.maxlen is None or \
           len(items) + len(self.Queue) <= self.Queue.maxlen:
            self.Queue.extend(items)
        else:
            raise Exception('Queue max length will be overflown')
def length(self):
return len(self.Queue)
def contains(self, item):
return item in self.Queue
def StackType():
return []
class PriorityQueueType():
def __init__(self, direction = 'smallest', f = lambda x: x):
self.container = []
self.direction = direction
self.func = f
    def __delitem__(self, key):
        for i, (value, item) in enumerate(self.container):
            if item == key:
                self.container.pop(i)
                return
def __len__(self):
return len(self.container)
def __contains__(self, elem):
return any(elem == child[1] for child in self.container)
def __getitem__(self, key):
for _, item in self.container:
if item == key:
return item
def append(self, elem):
bisect.insort_right(self.container, (self.func(elem), elem))
def pop(self):
if self.direction == 'smallest':
return self.container.pop(0)[1]
else:
return self.container.pop()[1]
class ImplementSearch:
def __init__(self, algo, problem, func = None):
if algo == "BFS":
self.breadthFirstSearch(problem)
elif algo == "DFS":
self.depthFirstSearch(problem)
elif algo == "AST":
self.aStarSearch(problem)
def breadthFirstSearch(self, problem):
ini_board = problem.initial_board
if problem.goalTest(ini_board):
print(ini_board)
return ini_board
frontier = QueueType()
frontier.addItem(ini_board)
explored = []
while frontier:
board = frontier.pop()
if(board.state not in explored):
explored.append(board.state)
print(board.state, board.action, board.path_cost)
print(".................................")
for child in board.expand(problem):
if child.state not in explored and child not in frontier:
if problem.goalTest(child):
print(child)
return child
frontier.addItem(child)
return None
def depthFirstSearch(self, problem):
ini_board = problem.initial_board
frontier = StackType()
frontier.append(ini_board)
explored = []
while frontier:
board = frontier.pop()
if problem.goalTest(board):
return board
if(board.state not in explored):
explored.append(board.state)
print(board.state, board.action, board.path_cost)
print(".................................")
frontier.extend(child for child in board.expand(problem)
if child.state not in explored and
child not in frontier)
return None
def aStarSearch(self, problem):
func = heuristic_h
board = problem.initial_board
if problem.goalTest(board):
return board
frontier = PriorityQueueType("smallest", func)
frontier.append(board)
explored = []
while frontier:
board = frontier.pop()
if problem.goalTest(board):
print(board.solution())
return board
explored.append(board.state)
#print(board.state, board.action, board.path_cost, func(board))
#print(".................................")
for child in board.expand(problem):
if child.state not in explored and child not in frontier:
frontier.append(child)
elif child in frontier:
incumbent = frontier[child]
if func(child) < func(incumbent):
del frontier[incumbent]
frontier.append(child)
return None
def cacheFuncValues(fn, slot = None, maxsize = 32):
"""Memoize fn: make it remember the computed value for any argument list.
If slot is specified, store result in that slot of first argument.
If slot is false, use lru_cache for caching the values."""
if slot:
def memoized_fn(obj, *args):
if hasattr(obj, slot):
return getattr(obj, slot)
else:
val = fn(obj, *args)
setattr(obj, slot, val)
return val
else:
@functools.lru_cache(maxsize=maxsize)
def memoized_fn(*args):
return fn(*args)
return memoized_fn
def heuristic_h(board):
goal = ["0","1","2","3","4","5","6","7","8"]
return sum(abs(int(s) % 3 - int(g) % 3) + abs(int(s) // 3 - int(g) // 3)
for s, g in ((board.state.index(str(i)), goal.index(str(i))) for i in range(1, 9))) + board.path_cost
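# Worked example (sketch): for a board whose state is
# ["1","0","2","3","4","5","6","7","8"], only tile 1 is displaced (index 0
# instead of its goal index 1), so the Manhattan term is
# |0%3 - 1%3| + |0//3 - 1//3| = 1 and heuristic_h returns
# 1 + board.path_cost, i.e. f(n) = g(n) + h(n), the A* priority.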
def writeToFile(line):
f = open('output.txt', 'a')
f.write(line)
f.write("\n")
f.close()
if __name__ == "__main__":
algo = sys.argv[1]
problem_string = sys.argv[2]
print(algo)
f = open('output.txt', 'w')
f.write("---------------------------------------------------------------------------\n")
f.write(" First Men AI Search Algorithm \n")
f.write("---------------------------------------------------------------------------\n")
f.close()
problem = Problem(problem_string.split(","), ["0","1","2","3","4","5","6","7","8"])
ImplementSearch(algo, problem)
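    # Example invocation (sketch): the initial board is a comma-separated
    # string of tiles with "0" as the blank, e.g.
    #   python search_algo.py AST 1,2,5,3,4,0,6,7,8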
| mit |
runarberg/servo | tests/wpt/harness/wptrunner/update/sync.py | 116 | 6508 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import shutil
import sys
import uuid
from .. import testloader
from base import Step, StepRunner
from tree import Commit
here = os.path.abspath(os.path.split(__file__)[0])
bsd_license = """W3C 3-clause BSD License
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of works must retain the original copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the original copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the W3C nor the names of its contributors may be
used to endorse or promote products derived from this work without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
def copy_wpt_tree(tree, dest):
"""Copy the working copy of a Tree to a destination directory.
:param tree: The Tree to copy.
:param dest: The destination directory"""
if os.path.exists(dest):
assert os.path.isdir(dest)
shutil.rmtree(dest)
os.mkdir(dest)
for tree_path in tree.paths():
source_path = os.path.join(tree.root, tree_path)
dest_path = os.path.join(dest, tree_path)
dest_dir = os.path.split(dest_path)[0]
if not os.path.isdir(source_path):
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
shutil.copy2(source_path, dest_path)
for source, destination in [("testharness_runner.html", ""),
("testharnessreport.js", "resources/")]:
source_path = os.path.join(here, os.pardir, source)
dest_path = os.path.join(dest, destination, os.path.split(source)[1])
shutil.copy2(source_path, dest_path)
add_license(dest)
def add_license(dest):
"""Write the bsd license string to a LICENSE file.
:param dest: Directory in which to place the LICENSE file."""
with open(os.path.join(dest, "LICENSE"), "w") as f:
f.write(bsd_license)
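# Example (sketch): copying a checked-out sync tree into the local tests
# directory; `tree` is any object exposing .root and .paths() as assumed by
# copy_wpt_tree(), and the LICENSE file is stamped automatically:
#
#   copy_wpt_tree(sync_tree, "/path/to/local/tests")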
class UpdateCheckout(Step):
"""Pull changes from upstream into the local sync tree."""
provides = ["local_branch"]
def create(self, state):
sync_tree = state.sync_tree
state.local_branch = uuid.uuid4().hex
sync_tree.update(state.sync["remote_url"],
state.sync["branch"],
state.local_branch)
sync_path = os.path.abspath(sync_tree.root)
        if sync_path not in sys.path:
from update import setup_paths
setup_paths(sync_path)
def restore(self, state):
assert os.path.abspath(state.sync_tree.root) in sys.path
Step.restore(self, state)
class GetSyncTargetCommit(Step):
"""Find the commit that we will sync to."""
provides = ["sync_commit"]
def create(self, state):
if state.target_rev is None:
#Use upstream branch HEAD as the base commit
state.sync_commit = state.sync_tree.get_remote_sha1(state.sync["remote_url"],
state.sync["branch"])
else:
            #Assumption: the revision checked above is the one to sync to
            state.sync_commit = Commit(state.sync_tree, state.target_rev)
state.sync_tree.checkout(state.sync_commit.sha1, state.local_branch, force=True)
self.logger.debug("New base commit is %s" % state.sync_commit.sha1)
class LoadManifest(Step):
"""Load the test manifest"""
provides = ["manifest_path", "test_manifest", "old_manifest"]
def create(self, state):
from manifest import manifest
state.manifest_path = os.path.join(state.metadata_path, "MANIFEST.json")
# Conservatively always rebuild the manifest when doing a sync
state.old_manifest = manifest.load(state.tests_path, state.manifest_path)
state.test_manifest = manifest.Manifest(None, "/")
class UpdateManifest(Step):
"""Update the manifest to match the tests in the sync tree checkout"""
def create(self, state):
from manifest import manifest, update
update.update(state.sync["path"], "/", state.test_manifest)
manifest.write(state.test_manifest, state.manifest_path)
class CopyWorkTree(Step):
"""Copy the sync tree over to the destination in the local tree"""
def create(self, state):
copy_wpt_tree(state.sync_tree,
state.tests_path)
class CreateSyncPatch(Step):
"""Add the updated test files to a commit/patch in the local tree."""
def create(self, state):
if state.no_patch:
return
local_tree = state.local_tree
sync_tree = state.sync_tree
local_tree.create_patch("web-platform-tests_update_%s" % sync_tree.rev,
"Update %s to revision %s" % (state.suite_name, sync_tree.rev))
local_tree.add_new(os.path.relpath(state.tests_path,
local_tree.root))
updated = local_tree.update_patch(include=[state.tests_path,
state.metadata_path])
local_tree.commit_patch()
if not updated:
self.logger.info("Nothing to sync")
class SyncFromUpstreamRunner(StepRunner):
"""(Sub)Runner for doing an upstream sync"""
steps = [UpdateCheckout,
GetSyncTargetCommit,
LoadManifest,
UpdateManifest,
CopyWorkTree,
CreateSyncPatch]
| mpl-2.0 |
rechner/Taxidi | dblib/postgres.py | 1 | 40884 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
# dblib/postgres.py (spaces, not tabs)
# PostgreSQL database driver for Taxídí.
# Zac Sturgeon <admin@jkltech.net>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
# All database tables and features are documented here:
# http://jkltech.net/taxidi/wiki/Database
#Don't forget to commit the database often, even after a select.
#Had problems with the 'data' table being a reserved keyword, so table
# and column names should always be escaped. Column names are case-
# insensitive otherwise.
debug = True
import os
import sys
import logging
import time
import datetime
import psycopg2
import hashlib
# one directory up
_root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, _root_dir)
import taxidi
#Signaling constants
SUCCESS = 1
OK = 1
FAIL = 2
EMPTY_RESULT = 4
USER_EXISTS = 8
CONSTRAINT_FAILED = 16
UNKNOWN_ERROR = 32
INVALID_PASSWORD = 64
AUTHORIZED = 1
UNAUTHORIZED = 0
NEW = 128
RESET_TABLES = 256
#Database schema version (integer):
database_version = 1
class Database:
"""
PostgreSQL driver for Taxídí database.
"""
def __init__(self, host, dbname, user, password, location='pyTaxidi'):
"""
Opens connection to a PostgreSQL database. Creates tables if they don't
exist, but expects the database was created by the admin.
"""
self.columns = """data.id, name, lastname, dob, data.activity, data.room, grade, phone,
"mobileCarrier", paging, parent1, "parent1Link", parent2,
"parent2Link", "parentEmail", medical, "joinDate", "lastSeen",
"lastModified", count, visitor, "noParentTag", barcode,
picture, authorized, unauthorized, notes"""
self.tables = [ "data", 'barcode', 'authorized', 'unauthorized',
'volunteers', 'categories', 'users', 'activities',
'services', 'rooms', 'carriers', 'statistics' ]
self.tableSQL = []
self.tableSQL.append("""CREATE TABLE data(id SERIAL primary key,
name text, lastname text, dob text, activity integer,
room integer, grade text, phone text,
"mobileCarrier" integer, paging text, parent1 text,
parent2 text, "parent1Link" text, "parent2Link" text,
"parentEmail" text, medical text, "joinDate" DATE,
"lastSeen" DATE, "lastModified" TIMESTAMP, count integer,
visitor bool, expiry text, "noParentTag" bool,
barcode integer, picture text, authorized integer,
unauthorized integer, notes text);""")
self.tableSQL.append("""CREATE TABLE barcode(id SERIAL primary key ,
value text NOT NULL, ref integer REFERENCES "data"(id));""")
self.tableSQL.append("""CREATE TABLE authorized(id SERIAL,
ref integer, name text, lastname text, dob text,
"docNumber" text, photo text, document text, "phoneHome" text,
"phoneMobile" text, "mobileCarrier" integer, notes text);""")
self.tableSQL.append("""CREATE TABLE unauthorized(id SERIAL,
ref integer, name text, lastname text, photo text,
document text, phone text, notes text);""")
self.tableSQL.append("""CREATE TABLE volunteers(id SERIAL,
name text, lastname text, dob text, email text,
username text, "phoneHome" text, "phoneMobile" text,
"mobileCarrier" integer, "backgroundCheck" bool,
"backgroundDocuments" text, profession text, title text,
company text, "jobContact" text, address text, city text,
zip text, state text, country text, nametag bool,
category text, subtitle text, services text, rooms text,
"notifoUser" text, "notifoSecret" text,
availability text, "joinDate" DATE, "lastSeen" DATE,
"lastModified" TIMESTAMP, picture text, notes text);""")
self.tableSQL.append("""CREATE TABLE categories(id SERIAL,
name text, admin integer);""")
self.tableSQL.append("""CREATE TABLE users(id SERIAL,
"user" text UNIQUE NOT NULL, hash text, salt text,
admin bool, "notifoUser" text, "notifoSecret" text,
"scATR" text, "leftHanded" bool, ref int, name text);""")
self.tableSQL.append("""CREATE TABLE activities(id SERIAL,
name text, prefix text, "securityTag" text, "securityMode" text,
"nametagEnable" bool, nametag text,
"parentTagEnable" bool, "parentTag" text,
admin integer, "autoExpire" bool, "notifyExpire" bool,
newsletter bool, "newsletterLink" text,
"registerSMSEnable" bool, "registerSMS" text,
"registerEmailEnable" bool, "registerEmail" text,
"checkinSMSEnable" bool, "checkinSMS" text,
"checkinEmailEnable" bool, "checkinEmail" text,
"parentURI" text, "alertText" text);""")
self.tableSQL.append("""CREATE TABLE services(id SERIAL,
name text, day integer, time TIME, "endTime" TIME);""")
self.tableSQL.append("""CREATE TABLE rooms(id SERIAL,
name text NOT NULL, activity integer NOT NULL,
"volunteerMinimum" integer, "maximumOccupancy" integer, camera text,
"cameraFPS" integer, admin integer, "notifoUser" text, "notifoSecret" text,
email text, mobile text, carrier integer);""")
self.tableSQL.append( """CREATE TABLE carriers(id SERIAL,
name text, region text, address text, subject text,
message text);""")
self.tableSQL.append("""CREATE TABLE statistics(id SERIAL,
person integer, date date,
service text, expires text,
checkin timestamp, checkout timestamp, code text, location text,
volunteer integer, activity text, room text);""")
#Setup logging
self.log = logging.getLogger(__name__)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('[%(asctime)s] %(module)-6s [%(levelname)-8s] %(message)s')
ch.setFormatter(formatter)
self.log.addHandler(ch)
if debug:
self.log.setLevel(logging.DEBUG)
else:
self.log.setLevel(logging.INFO)
#Create connection:
try: #TODO: Add SSL/SSH tunnel
if ':' in host:
host, port = host.split(':')
else:
port = 5432
self.conn = psycopg2.connect(host=host, database=dbname,
user=user, password=password, port=port)
#application_name=location)
self.cursor = self.conn.cursor()
except psycopg2.OperationalError as e:
if e.pgcode == '28P01' or e.pgcode == '28000':
raise DatabaseError(INVALID_PASSWORD)
else:
#Unhandled error. Show it to the user.
raise DatabaseError(FAIL, e)
self.log.info("Created PostgreSQL database instance on host {0}.".format(host))
self.log.debug("Checking for tables and creating them if not present....")
self.status = OK
self.createTables()
def spawnCursor(self):
"""
Returns a new cursor object (for multi-threadding use).
Delete it when done.
"""
return self.conn.cursor()
def createTables(self):
for i in range(len(self.tables)):
#Not user-controled data, so a .format() is okay here.
exists = self.execute(
"SELECT true FROM pg_class WHERE relname = '{0}';".format(self.tables[i]))
if not exists:
#Create it:
self.status = RESET_TABLES
self.log.info("Creating table {0}".format(self.tables[i]))
self.execute(self.tableSQL[i])
self.commit()
self.commit()
def commit(self):
self.log.debug('Committed database')
self.conn.commit()
def close(self):
"""
Close the connection and clean up the objects.
Don't forget to call this when exiting the program.
"""
self.cursor.close()
self.conn.close()
del self.cursor
del self.conn
    def execute(self, sql, args=(''), cursor=None):
        """Executes SQL, reporting debug to the log. For internal use.
        Uses the connection's default cursor unless one is passed in
        (e.g. from spawnCursor() for multi-threaded use)."""
        cur = cursor if cursor is not None else self.cursor
        if debug:
            #Collapse indentation and fold newlines so the logged
            #statement fits on one line (logging only; the SQL that is
            #executed is left untouched):
            pretty = sql.replace('    ', '').replace('\n', ' ')
            if args != (''):
                self.log.debug(pretty % args)
            else:
                self.log.debug(pretty)
        try:
            cur.execute(sql, args)
            try:
                return cur.fetchall()
            except psycopg2.ProgrammingError:
                return True
        except (psycopg2.ProgrammingError, psycopg2.OperationalError) as e:
            self.log.error('psycopg2 returned operational error: {0}'
                            .format(e))
            if self.conn:
                self.conn.rollback() #drop any changes to preserve db.
            raise
def dict_factory(self, row):
d = {}
for idx, col in enumerate(self.cursor.description):
d[col[0]] = row[idx]
return d
def to_dict(self, a):
"""
Converts results from a cursor object to a nested dictionary.
"""
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
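    # Example (sketch): rows come back from execute() as plain tuples;
    # to_dict() zips them against cursor.description, e.g.
    #   db.to_dict(db.execute("SELECT id, name FROM rooms;"))
    #   #-> [{'id': 1, 'name': 'Bunnies'}, ...]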
# == data functions ==
# Populate all fields for registering a child only. Entries without
# a default are mandatory. Form should check for missing stuff.
def Register(self, name, lastname, phone, parent1, paging='',
mobileCarrier=0, activity=0, room=0, grade='', parent2='',
parent1Link='', parent2Link='', parentEmail='', dob='',
medical='', joinDate='', lastSeen='', lastModified='', count=0,
visitor=False, expiry=None, noParentTag=None, barcode=None,
picture='', authorized=None, unauthorized=None, notes=''):
"""Enter a new child's record into the `"data"` table.
name, lastname, phone, parent1, paging=''
mobileCarrier=0, activity=0, room=0, grade='', parent2='',
parent1Link='', parent2Link='', parentEmail='', dob=''
medical='', joinDate='', lastSeen='', lastModified='', count=0
visitor=False, noParentTag=False, barcode=None, picture='',
authorized=None, unauthorized=None, notes=''
Returns the id of the newly created record.
Be sure to create entry in barcode, unauthorized, or authorized table
before creating a record here.
If registering, be sure to call this before checkin() on the record itself.
Remember to call commit() after creating all these entries.
"""
#set dates:
if joinDate == '': #Generally should always be true (unless
joinDate = str(datetime.date.today()) #importing from script
if lastSeen == '':
lastSeen = str(datetime.date.today()) #ISO8601 (YYYY-MM-DD)
if lastModified == '':
#should be plain ISO 8601 (required for Postgres timestamp type)
#~ lastModified = time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())
lastModified = datetime.datetime.now()
#~ lastModified = time.ctime() #without timezone.
#escape and execute
self.execute("""INSERT INTO "data"(name, lastname, dob, phone,
paging, parent1, "mobileCarrier", activity, room, grade,
parent2, "parent1Link", "parent2Link", "parentEmail", medical,
"joinDate", "lastSeen", "lastModified", count, visitor, expiry,
"noParentTag", barcode, picture, notes) VALUES
(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,
%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);""",
(name, lastname, dob, phone, paging, parent1, mobileCarrier,
activity, room, grade, parent2, parent1Link, parent2Link,
parentEmail, medical, joinDate, lastSeen, lastModified, count,
visitor, expiry, noParentTag, barcode, picture, notes))
self.commit()
ret = self.execute("""SELECT id FROM "data" WHERE
name = %s AND lastname = %s
AND phone = %s""", (name, lastname, phone))
self.commit()
        if len(ret) > 1:
            for i in ret:
                self.log.warn('Duplicate entry found at {0}.'.format(i[0]))
if ret == []:
raise DatabaseError(EMPTY_RESULT, 'Record not committed.')
else:
return ret[0][0]
def Delete(self, index):
"""Delete a row in the data table by index."""
self.execute("DELETE FROM \"data\" WHERE id = %s;", (index,))
self.commit()
def Update(self, index, name, lastname, phone, parent1, paging='',
mobileCarrier=0, activity=0, room=0, grade='', parent2='',
parent1Link='', parent2Link='', parentEmail='', dob='',
medical='', joinDate=None, lastSeen=None, count=0,
visitor=False, expiry=None, noParentTag=None, barcode=None,
picture='', authorized=None, unauthorized=None, notes=''):
"""Update a record. Pass index as first argument. lastModified automatically set.
index, name, lastname, dob, phone, paging, and parent1 are mandatory.
Defaults are as follows: mobileCarrier=0, activity=0, room=0, grade='',
parent2='', parent1Link='', parent2Link='', parentEmail='', medical='',
joinDate='', lastSeen='', visitor=False,
noParentTag=False, barcode='', picture='', notes=''
"""
try:
self.execute("UPDATE \"data\" SET name=%s, lastname=%s WHERE id=%s;", (name, lastname, index))
self.execute("UPDATE \"data\" SET dob=%s WHERE id=%s;", (dob, index))
self.execute("UPDATE \"data\" SET phone=%s, paging=%s WHERE id=%s;",(phone, paging, index))
self.execute("UPDATE \"data\" SET \"mobileCarrier\"=%s WHERE id=%s;",
(mobileCarrier, index))
self.execute("""UPDATE "data" SET parent1=%s, parent2=%s,
"parent1Link"=%s, "parent2Link"=%s WHERE id=%s""", (parent1,
parent2, parent1Link, parent2Link, index))
self.execute("UPDATE \"data\" SET activity=%s, room=%s, grade=%s WHERE id=%s;",
(activity, room, grade, index))
self.execute("UPDATE \"data\" SET \"parentEmail\"=%s, medical=%s WHERE id=%s;",
(parentEmail, medical, index))
if joinDate != None:
self.execute("UPDATE \"data\" SET \"joinDate\"=%s WHERE id=%s;",
(joinDate, index))
if lastSeen != None:
self.execute("UPDATE \"data\" SET \"lastSeen\"=%s WHERE id=%s;",
(lastSeen, index))
self.execute("UPDATE \"data\" SET \"lastModified\"=%s WHERE id=%s;",
(datetime.datetime.now(), index))
self.execute("""UPDATE "data" SET visitor=%s, expiry=%s, "noParentTag"=%s,
barcode=%s, picture=%s, notes=%s WHERE id=%s;""", (visitor, expiry,
noParentTag, barcode, picture, notes, index))
except psycopg2.Error as e:
self.log.error(e)
self.log.error("Error while updating. Rolling back transaction....")
self.conn.rollback()
raise
self.commit()
# === end data functions ===
# === begin search functions ===
def Search(self, query):
"""
Generic search function.
Searches first through `data`, then passes to SearchVolunteer()
Accepts query as first argument. Searches the following in data table:
- Last four digits of phone number (if len == 4)
- paging(?)
- lastname
- firstname
Then searches through volunteers table.
"""
a = []
if query.isdigit() and (len(query) == 4 or len(query) == 7) \
or query[0] == '+':
#search by phone.
a = self.SearchPhone(query)
if not query.isdigit(): #Search in names.
a = self.SearchName(query)
if len(a) == 0:
#Search partial names:
a = self.SearchName(query+'%')
        #check if hex:
        try:
            hexval = int(query, 16)
            isHex = True
        except ValueError:
            isHex = False
        if (len(query) == 3 or (isHex and len(query) == 4)) and len(a) == 0:
            a = self.SearchSecure(query)
if len(a) == 0: #Catch barcodes
a = self.SearchBarcode(query)
#TODO: Search volunteers:
return a
def SearchName(self, query):
"""
Searches only in name, lastname, parent's column.
Returns *. '*' and '%' are treated as wild-card characters, and will
search using the LIKE operator.
"""
if ("%" in query) or ("*" in query):
query = query.replace("*", "%")
a = self.execute("""SELECT DISTINCT {0} FROM "data" WHERE name LIKE %s
OR lastname LIKE %s
or parent1 LIKE %s
or parent2 LIKE %s
ORDER BY lastname;
""".format(self.columns), (query,)*4)
else:
a = self.execute("""SELECT DISTINCT {0} FROM "data" WHERE
name ILIKE %s
OR lastname ILIKE %s
OR parent1 ILIKE %s
OR parent2 ILIKE %s
ORDER BY lastname;
""".format(self.columns), (query,)*4)
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def SearchBarcode(self, query):
"""
Searches for an entry (only in the data table) by barcode.
"""
a = self.execute("""SELECT DISTINCT {0} FROM "data"
INNER JOIN barcode ON "data".id = barcode.ref
WHERE barcode.value = %s;
""".format(self.columns), (query,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def SearchPhone(self, query):
"""
Searches for an entry by entire or last four digits of phone number.
"""
query = str(query)
#Most of this is taken out of my input validator
if len(query) == 4:
#Search by last four:
query = '%' + query
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone LIKE %s
ORDER BY lastname;
""".format(self.columns), (query,))
elif query.isdigit() and len(query) == 10 \
and query[0] not in '01' and query[3] not in '01': #US: '4805551212'
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone = %s
ORDER BY lastname;
""".format(self.columns), (query,))
elif len(query) == 12 and query[3] in '.-/' \
and query[7] in '.-/': #US: '334-555-1212'
            #Strip punctuation/formatting characters. (The original called an
            #undefined Translator helper; str.translate does the same job.)
            query = query.encode('ascii').translate(None, '+(-)./ ')
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone = %s
ORDER BY lastname;
""".format(self.columns), (query,))
elif query[0] == '(' and len(query) == 14: #US: (480) 555-1212
query = query[1:4] + query[6:9] + query[10:14]
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone = %s
ORDER BY lastname;
""".format(self.columns), (query,))
elif query[0] == '+': #International format
            query = query.encode('ascii').translate(None, '+(-)./ ')
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone = %s
ORDER BY lastname;
""".format(self.columns), (query,))
elif len(query) == 7:
#Search by last seven:
query = '%' + query
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone LIKE %s
ORDER BY lastname;
""".format(self.columns), (query,))
else:
self.log.warn("Search key {0} probably isn't a phone number.")
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone = %s
ORDER BY lastname;
""".format(self.columns), (query,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def SearchSecure(self, query):
"""
Searches for a record by the security code assigned at check-in, if applicable.
"""
a = self.execute("""SELECT DISTINCT {0} FROM data
INNER JOIN statistics ON data.id = statistics.person
WHERE statistics.code = %s;
""".format(self.columns), (query.upper(),))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
# === end search functions ===
def GetRecordByID(self, ref):
"""
Returns a single row specified by id.
"""
a = self.execute("SELECT * FROM data WHERE id = %s", (ref,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret[0]
# === barcode functions ===
def GetBarcodes(self, record):
"""
Returns all barcodes listed for a given record ID.
"""
a = self.execute("""SELECT DISTINCT id, value FROM barcode
WHERE ref = %s ORDER BY id;""", (record,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
#~ ret.append(i)
return ret
def AddBarcode(self, record, value):
self.execute("""INSERT INTO barcode(value, ref)
VALUES (%s, %s);""", (value, record))
def RemoveBarcode(self, ref):
self.execute("DELETE FROM barcode WHERE id = %s;", (ref,))
def RemoveAllBarcodes(self, ref):
"""
Deletes all barcodes for a given record (nuke)
"""
self.execute("DELETE FROM barcode WHERE ref = %s;", (ref,))
def UpdateBarcode(self, ref, value):
self.execute("UPDATE barcode SET value = %s WHERE id = %s", (value, ref))
# === end barcode functions ===
# === services functions ===
def GetServices(self):
a = self.execute("SELECT * FROM services ORDER BY id;")
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def AddService(self, name, day=0, time='00:00:00', endTime='23:59:59'):
self.execute("""INSERT INTO services(name, day, time, "endTime")
VALUES (%s, %s, %s, %s);""", (name, day, time, endTime))
def RemoveService(self, ref):
self.execute("DELETE FROM services WHERE id = %s;", (ref,))
def UpdateService(self, ref, name, day, time, endTime):
self.execute("""UPDATE services SET name = %s,
day = %s, time = %s, "endTime" = %s WHERE id = %s;""",
(name, day, time, endTime, ref))
# === end services functions ===
# === activities functions ===
    def GetActivities(self):
        a = self.execute("SELECT * FROM activities;")
        ret = []
        for i in a:
            row = self.dict_factory(i) #return as a nested dictionary
            #Substitute an em-dash for any unset (NULL) fields:
            for key in row:
                if row[key] is None: row[key] = u'—'
            ret.append(row)
        return ret
def GetActivity(self, ref):
"""
Converts a reference to the activity table to an explicit string value
        (for reading a record's assigned activity with no foreign key support).
"""
a = self.execute("SELECT name FROM activities WHERE id = %s;", (ref,))
if len(a) > 0:
return a[0][0]
else:
return None
def GetActivityById(self, ref):
a = self.execute("SELECT * FROM activities WHERE id = %s;", (ref,))
if len(a) > 0:
return self.dict_factory(a[0])
else:
return None
def AddActivity(self, name, prefix='', securityTag=False, securityMode='simple',
nametag='default', nametagEnable=True,
parentTag='default', parentTagEnable=True, admin=None,
autoExpire = False, notifyExpire = False, newsletter=False,
newsletterLink='', parentURI='', alert=''):
if prefix == '' or prefix == None:
prefix = name[0].upper()
self.execute("""INSERT INTO activities(name, prefix, "securityTag",
"securityMode", "nametagEnable", nametag,
"parentTagEnable", "parentTag", admin, "autoExpire",
"notifyExpire", newsletter, "newsletterLink",
"parentURI", "alertText")
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s);""",
(name, prefix, securityTag, securityMode,
nametagEnable, nametag, parentTagEnable, parentTag,
admin, autoExpire, notifyExpire, newsletter,
newsletterLink, parentURI, alert))
def RemoveActivity(self, ref):
self.execute("DELETE FROM activities WHERE id = %s;", (ref,))
def UpdateActivity(self, ref, name, prefix, securityTag, securityMode,
nametag, nametagEnable, parentTag,
parentTagEnable, admin, autoExpire, notifyExpire,
newsletter, newsletterLink):
if prefix == '' or prefix == None:
prefix = name[0].upper()
self.execute("""UPDATE activities SET name = %s, prefix = %s,
securityTag = %s, securityMode = %s,
nametag = %s, nametagEnable = %s, parentTag = %s,
parentTagEnable = %s, admin = %s, autoExpire = %s,
notifyExpire = %s, newsletter = %s,
newsletterLink = %s WHERE id = %s;""", (name, prefix,
securityTag, securityMode, nametag,
nametagEnable, parentTag, parentTagEnable, admin,
autoExpire, notifyExpire, newsletter,
newsletterLink, ref))
# === end activities functions ==
# === rooms functions ===
def AddRoom(self, name, activity, volunteerMinimum=0, maximumOccupancy=0,
camera='', cameraFPS=0, admin=0, notifoUser=None,
notifoSecret=None, email='', mobile='', carrier=None):
#Check to see that activity exists:
ret = self.execute('SELECT id FROM activities WHERE id = %s;',
(activity,))
if len(ret) == 1:
#Activity exists. Create room.
self.execute("""INSERT INTO rooms(name, activity, "volunteerMinimum",
"maximumOccupancy", camera, "cameraFPS", admin,
"notifoUser", "notifoSecret", email, mobile, carrier)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)""",
(name, activity, volunteerMinimum, maximumOccupancy,
camera, cameraFPS, admin, notifoUser,
notifoSecret, email, mobile, carrier))
return SUCCESS
else:
            return CONSTRAINT_FAILED #Foreign key constraint failed
def GetRooms(self):
a = self.execute('SELECT * FROM rooms;')
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def GetRoomByID(self, ref):
"""
Returns a room name specified from a reference (for displaying results).
"""
a = self.execute('SELECT name FROM rooms WHERE id = %s;', (ref,))
if a != None:
try:
return a[0][0] #Return string
except IndexError:
return ''
else:
return ''
def GetRoom(self, activity):
"""
Returns rooms dictionary matching a given activity (by name).
"""
a = self.execute("""SELECT rooms.*
FROM rooms
INNER JOIN activities ON
activities.id = rooms.activity
WHERE activities.name = %s;""",
(activity,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
    def GetRoomID(self, name):
        """
        Returns a room's primary key (id) given a name.
        """
        a = self.execute("SELECT id FROM rooms WHERE name = %s;", (name,))
        if a:
            return a[0][0] #Return the bare id, as GetRoomByID does
        else:
            return ''
def RemoveRoom(self, ref):
self.execute("DELETE FROM rooms WHERE id = %s;", (ref,))
# === end room functions ===
# === users functions ===
def GetUsers(self):
a = self.execute("""SELECT "user", admin, "notifoUser", "notifoSecret",
"scATR", "leftHanded", ref FROM users;""")
return self.to_dict(a)
def GetUser(self, user):
#Should only return one row
return self.to_dict(self.execute("SELECT * FROM users WHERE \"user\" = %s;", (user,)))[0]
def UserExists(self, user):
a = self.execute("SELECT id FROM \"users\" WHERE \"user\"= %s;", (user,))
self.commit()
if len(a) == 0:
return False
else:
return True
def AddUser(self, user, password, admin=False, notifoUser=None,
notifoSecret=None, scATR=None, leftHanded=False, ref=None):
#Check that the user doesn't exist:
if len(self.execute("SELECT * FROM users WHERE user = %s;", \
(user,))) != 0:
self.commit()
return USER_EXISTS
salt = os.urandom(29).encode('base_64').strip('\n') #Get a salt
if password == '': #Set a random password
password = os.urandom(8).encode('base_64').strip('\n')
ph = hashlib.sha256(password + salt)
ph.hexdigest()
try:
self.execute("""INSERT INTO "users"("user", hash, salt, admin, "notifoUser",
"notifoSecret", "scATR", "leftHanded", ref) VALUES
(%s, %s, %s, %s, %s, %s, %s, %s, %s);""",
(user, ph.hexdigest(), salt, admin, notifoUser,
notifoSecret, scATR, leftHanded, ref))
except psycopg2.IntegrityError:
return USER_EXISTS
finally:
self.commit()
return SUCCESS
def RemoveUser(self, user):
"""
Remove an user from the system by username.
"""
self.execute("DELETE FROM users WHERE \"user\" = %s;", (user,))
def AuthenticateUser(self, user, password):
if self.UserExists(user):
info = self.GetUser(user)
passhash = hashlib.sha256(password + info['salt'])
if info['hash'] == passhash.hexdigest():
return 1
return 0
# == end users functions ==
# === Check-in functions ===
def DoCheckin(self, person, services, expires, code, location, activity, room, cursor=None):
"""
person: id reference of who's being checked-in.
services: a tuple of services to be checked-in for. Pass singleton if only one.
Services should be passed in chronological order!
expires: expiration time, if applicable, of the last service chronologically.
code: secure code, or hashed value on child's tag if not in simple mode.
location: text location to identify kiosk used for check-in.
activity: activity name as string.
room: room name as string.
"""
expiry = None
for service in services:
if services.index(service) + 1 == len(services): #On the last item
expiry = expires
#~ try:
self.execute("""INSERT INTO statistics(person, date, service, expires,
checkin, checkout, code, location, activity, room)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);""",
(person, str(datetime.date.today()), service, expiry,
datetime.datetime.now(), None, code, location, activity, room),
cursor)
#~ except sqlite.Error as e:
#~ raise DatabaseError(UNKNOWN_ERROR, e.args[0])
        #~ #TODO: Increment count, update last seen date.
count = self.execute("SELECT count FROM data WHERE id = %s;", (person,))
count = int(count[0][0]) + 1
today = datetime.date.today()
self.execute("UPDATE data SET count = %s, \"lastSeen\" = %s WHERE id = %s;",
(count, today, person))
self.commit()
def DoCheckout(self, person):
"""
Marks a record as checked-out.
"""
self.execute("UPDATE statistics SET checkout = %s WHERE person = %s AND \"date\" = date('now');",
(datetime.datetime.now(), person))
self.commit()
# === end checkin functions ===
def GetHistory(self, person):
"""
Returns check-in history.
"""
a = self.execute("SELECT date, service, checkin, checkout, room, location FROM statistics WHERE person = %s;", (person,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def GetStatus(self, ref, full=False):
"""
Returns the check-in status for a specified record, according to the
constants defined in taxidi.py. (STATUS_NONE, STATUS_CHECKED_IN, or
STATUS_CHECKED_OUT). If full=True, then the status is returned as part
of a dictionary of the matching statistics row. Only returns values from
today's date.
"""
a = self.execute("SELECT * FROM statistics WHERE person = %s AND checkin > date('now');", (ref,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
if len(ret) == 0:
if full:
return { 'status': taxidi.STATUS_NONE, 'code': None }
return taxidi.STATUS_NONE
elif len(ret) == 1:
#Only one check-in. Return what's appropriate:
ret = ret[0]
else:
#Just check the last check-in for now
ret = ret[-1]
if ret['checkin'] == None: #Not checked-in (this shouldn't happen)
if full:
ret['status'] = taxidi.STATUS_NONE
return ret
return taxidi.STATUS_NONE
else:
if ret['checkout'] == None: #Checked-in
if full:
ret['status'] = taxidi.STATUS_CHECKED_IN
return ret
return taxidi.STATUS_CHECKED_IN
else:
if full:
ret['status'] = taxidi.STATUS_CHECKED_OUT
return ret
return taxidi.STATUS_CHECKED_OUT
class DatabaseError(Exception):
    def __init__(self, code, value=''):
        if code == EMPTY_RESULT:
            self.error = 'Query returned empty result'
        elif code == CONSTRAINT_FAILED:
            self.error = 'Unique key constraint failed.'
        elif code == USER_EXISTS:
            self.error = 'The user specified already exists.'
        elif code == INVALID_PASSWORD:
            self.error = 'Invalid username, password, or authorization specification.'
        elif value != '':
            self.error = str(value).replace('\t', '').capitalize()
        else:
            self.error = 'Generic database error.'
        self.code = code
def __str__(self):
return str(self.error).replace('\t', '').capitalize()
#~ return repr(self.error)
if __name__ == '__main__':
try:
db = Database('localhost:15432', 'taxidi', 'taxidi', 'lamepass')
except DatabaseError as e:
print e.error
exit()
import pprint
#~ newRecord = db.Register("Zac", "Sturgeon", "(212) 555-5555", "Diana Sturgeon")
#~ db.Delete(newRecord)
#~ print db.execute("SELECT * FROM \"data\";")
#~ pprint.pprint( db.Search('sturgeon') )
#~ db.Update(12, "Zachary", "Sturgeon", "(212) 555-5555", "James Sturgeon")
#Barcode functions:
#~ db.AddBarcode(1, '12345')
#~ db.RemoveBarcode(1)
#~ pprint.pprint(db.Search("ABCD"))
#~ codes = db.GetBarcodes(2)
#~ pprint.pprint(codes)
#~ print
#~ print [ a['value'] for a in codes ]
print db.Search("9989")
#Services:
#~ db.AddService('First Service')
#~ print db.GetServices()
#Activities:
#~ db.AddActivity('Explorers', securityTag=True, securityMode='md5',
#~ nametagEnable=True, parentTagEnable=True,
#~ alert='Nursery alert text goes here.')
#~ db.AddActivity('Outfitters', securityTag=False, securityMode='simple',
#~ nametagEnable=True, parentTagEnable=False,
#~ alert='Nursery alert text goes here.')
#~ db.commit()
#~ print db.GetActivityById(1)
#User functions:
#~ db.RemoveUser('admin')
#~ db.commit()
#~ if db.AddUser('admin', 'password', admin=True) == USER_EXISTS: print "User admin already exists"
#~ db.commit()
#~ pprint.pprint( db.GetUsers() )
#~ print
#~ print (db.AuthenticateUser('admin', 'badpassword') == AUTHORIZED) #False
#~ print (db.AuthenticateUser('baduser', 'pass') == AUTHORIZED) #False
#~ print (db.AuthenticateUser(u'admin', u'password') == AUTHORIZED) #True
#~ print (db.AuthenticateUser('admin', 'password') == AUTHORIZED) #True
#Check-in:
#~ db.DoCheckin(2, ('First Service', 'Second Service', 'Third Service'),
#~ '14:59:59', '5C55', 'Kiosk1', 'Explorers', 'Jungle Room')
#Rooms:
#~ db.AddRoom("Bunnies", 1)
#~ db.AddRoom("Ducks", 1)
#~ db.AddRoom("Kittens", 1)
#~ db.AddRoom("Robins", 1)
#~ db.AddRoom("Squirrels", 1)
#~ db.AddRoom("Puppies", 1)
#~ db.AddRoom("Caterpillars", 1)
#~ db.AddRoom("Butterflies", 1)
#~ db.AddRoom("Turtles", 1)
#~ db.AddRoom("Frogs", 1)
#~ db.AddRoom("Outfitters", 2)
#~ pprint.pprint(db.GetRoomByID(8))
pprint.pprint(db.GetHistory(1))
db.commit()
| gpl-3.0 |