| code (string, 2–1.05M chars) | repo_name (string, 5–104 chars) | path (string, 4–251 chars) | language (1 class) | license (15 classes) | size (int32, 2–1.05M) |
|---|---|---|---|---|---|
import platform
import glob
from .io import DxlIO, Dxl320IO, DxlError
from .error import BaseErrorHandler
from .controller import BaseDxlController
from .motor import DxlMXMotor, DxlAXRXMotor, DxlXL320Motor
from ..robot import Robot
def _get_available_ports():
""" Tries to find the available usb2serial port on your system. """
if platform.system() == 'Darwin':
return glob.glob('/dev/tty.usb*')
elif platform.system() == 'Linux':
return glob.glob('/dev/ttyACM*') + glob.glob('/dev/ttyUSB*')
elif platform.system() == 'Windows':
import _winreg
import itertools
ports = []
path = 'HARDWARE\\DEVICEMAP\\SERIALCOMM'
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, path)
for i in itertools.count():
try:
ports.append(str(_winreg.EnumValue(key, i)[1]))
except WindowsError:
return ports
return []
def get_available_ports(only_free=False):
    """ Returns the available serial ports, optionally keeping only those not already in use. """
ports = _get_available_ports()
if only_free:
ports = list(set(ports) - set(DxlIO.get_used_ports()))
return ports
def find_port(ids, strict=True):
""" Find the port with the specified attached motor ids.
:param list ids: list of motor ids to find
    :param bool strict: specify if all ids should be found (when set to False, finding at least half of the motors is enough)
    .. warning:: If two (or more) ports are attached to the same list of motor ids, the first match will be returned.
"""
for port in get_available_ports():
for DxlIOCls in (DxlIO, Dxl320IO):
try:
with DxlIOCls(port) as dxl:
founds = len(dxl.scan(ids))
if strict and founds == len(ids):
return port
if not strict and founds >= len(ids) / 2:
return port
except DxlError:
continue
raise IndexError('No suitable port found for ids {}!'.format(ids))
def autodetect_robot():
""" Creates a :class:`~pypot.robot.robot.Robot` by detecting dynamixel motors on all available ports. """
motor_controllers = []
for port in get_available_ports():
for DxlIOCls in (DxlIO, Dxl320IO):
dxl_io = DxlIOCls(port)
ids = dxl_io.scan()
if not ids:
dxl_io.close()
continue
models = dxl_io.get_model(ids)
motorcls = {
'MX': DxlMXMotor,
'RX': DxlAXRXMotor,
'AX': DxlAXRXMotor,
'XL': DxlXL320Motor
}
motors = [motorcls[model[:2]](id, model=model)
for id, model in zip(ids, models)]
c = BaseDxlController(dxl_io, motors)
motor_controllers.append(c)
return Robot(motor_controllers)
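
# --- Illustrative usage (a sketch, not part of the original module) ---
# This is a package __init__ with relative imports, so the example is given as
# comments rather than a runnable __main__ block:
#
#     from pypot.dynamixel import get_available_ports, find_port, autodetect_robot
#     print(get_available_ports())      # e.g. ['/dev/ttyUSB0'] on Linux
#     port = find_port(ids=[1, 2, 3])   # port whose bus answers for motors 1-3
#     robot = autodetect_robot()        # Robot built from every motor that responds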
| manon-cortial/pypot | pypot/dynamixel/__init__.py | Python | gpl-3.0 | 2,888 |
from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
| ianmtaylor1/pacal | pacal/stats/noncentral_distr.py | Python | gpl-3.0 | 2,437 |
from ert.cwrap import CWrapper, BaseCClass
from ert.enkf import ENKF_LIB
from ert.util import StringList
class SummaryKeyMatcher(BaseCClass):
def __init__(self):
c_ptr = SummaryKeyMatcher.cNamespace().alloc()
super(SummaryKeyMatcher, self).__init__(c_ptr)
def addSummaryKey(self, key):
assert isinstance(key, str)
return SummaryKeyMatcher.cNamespace().add_key(self, key)
def __len__(self):
return SummaryKeyMatcher.cNamespace().size(self)
def __contains__(self, key):
return SummaryKeyMatcher.cNamespace().match_key(self, key)
def isRequired(self, key):
""" @rtype: bool """
return SummaryKeyMatcher.cNamespace().is_required(self, key)
def keys(self):
""" @rtype: StringList """
return SummaryKeyMatcher.cNamespace().keys(self)
def free(self):
SummaryKeyMatcher.cNamespace().free(self)
cwrapper = CWrapper(ENKF_LIB)
cwrapper.registerObjectType("summary_key_matcher", SummaryKeyMatcher)
SummaryKeyMatcher.cNamespace().alloc = cwrapper.prototype("c_void_p summary_key_matcher_alloc()")
SummaryKeyMatcher.cNamespace().free = cwrapper.prototype("void summary_key_matcher_free(summary_key_matcher)")
SummaryKeyMatcher.cNamespace().size = cwrapper.prototype("int summary_key_matcher_get_size(summary_key_matcher)")
SummaryKeyMatcher.cNamespace().add_key = cwrapper.prototype("void summary_key_matcher_add_summary_key(summary_key_matcher, char*)")
SummaryKeyMatcher.cNamespace().match_key = cwrapper.prototype("bool summary_key_matcher_match_summary_key(summary_key_matcher, char*)")
SummaryKeyMatcher.cNamespace().keys = cwrapper.prototype("stringlist_obj summary_key_matcher_get_keys(summary_key_matcher)")
SummaryKeyMatcher.cNamespace().is_required = cwrapper.prototype("bool summary_key_matcher_summary_key_is_required(summary_key_matcher, char*)")
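
# --- Illustrative usage (a sketch, not part of the original module) ---
# Constructing the matcher requires the native library behind ENKF_LIB to be
# loadable; the wildcard key below is an assumption about the C-side matcher.
#
#     matcher = SummaryKeyMatcher()
#     matcher.addSummaryKey('FOPT')
#     matcher.addSummaryKey('WOPR:*')
#     print('FOPT' in matcher, len(matcher), matcher.keys())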
| iLoop2/ResInsight | ThirdParty/Ert/devel/python/python/ert/enkf/summary_key_matcher.py | Python | gpl-3.0 | 1,882 |
#!/usr/bin/python
# coding: utf8
import os
import subprocess
from {% if cookiecutter.namespace %}{{ cookiecutter.namespace }}.{{ cookiecutter.project_slug }}{% else %}{{ cookiecutter.project_slug }}{% endif %}.commands.base import BaseCommand
from {% if cookiecutter.namespace %}{{ cookiecutter.namespace }}.{{ cookiecutter.project_slug }}{% else %}{{ cookiecutter.project_slug }}{% endif %} import PROJECT_DIR
class Configure(BaseCommand):
def execute(self):
os.chdir(os.path.join(PROJECT_DIR, 'build'))
subprocess.run(['cmake', PROJECT_DIR])
| antoinedube/numeric-cookiecutter | {{cookiecutter.namespace+'.'+cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/commands/configure.py | Python | gpl-3.0 | 571 |
#!/usr/bin/env python
"""The setup and build script for the python-telegram-bot library."""
import codecs
import os
from setuptools import setup, find_packages
def requirements():
"""Build the requirements list for this project"""
requirements_list = []
with open('requirements.txt') as requirements:
for install in requirements:
requirements_list.append(install.strip())
return requirements_list
packages = find_packages(exclude=['tests*'])
with codecs.open('README.rst', 'r', 'utf-8') as fd:
fn = os.path.join('telegram', 'version.py')
with open(fn) as fh:
code = compile(fh.read(), fn, 'exec')
exec(code)
setup(name='python-telegram-bot',
version=__version__,
author='Leandro Toledo',
author_email='devs@python-telegram-bot.org',
license='LGPLv3',
url='https://python-telegram-bot.org/',
keywords='python telegram bot api wrapper',
description="We have made you a wrapper you can't refuse",
long_description=fd.read(),
packages=packages,
install_requires=requirements(),
extras_require={
'json': 'ujson',
'socks': 'PySocks'
},
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],)
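
# Usage note (illustrative): the optional extras declared above can be pulled
# in with pip's extras syntax, e.g. `pip install python-telegram-bot[socks]`.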
| txemagon/1984 | modules/Telegram-bot-python/setup.py | Python | gpl-3.0 | 2,168 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('account', '0003_remove_userprofile_is_check'),
]
operations = [
migrations.RemoveField(
model_name='userprofile',
name='is_create',
),
migrations.RemoveField(
model_name='userprofile',
name='is_delete',
),
migrations.RemoveField(
model_name='userprofile',
name='is_modify',
),
]
| yangxianbo/jym | account/migrations/0004_auto_20160525_1032.py | Python | gpl-3.0 | 591 |
import io
import openpyxl
from django.test import (
Client, TestCase
)
from django.urls import reverse
from core.models import (
User, Batch, Section, Election, Candidate, CandidateParty,
CandidatePosition, Vote, VoterProfile, Setting, UserType
)
class ResultsExporter(TestCase):
"""
Tests the results xlsx exporter view.
    This subview may only process requests from logged-in admin users. Other
    users will be redirected to '/'. This will also only accept GET requests.
    GET requests may have an `election` parameter whose value must be the id
of an election. The lack of an election parameter will result in the
results of all elections to be exported, with each election having its
own worksheet. Other URL parameters will be ignored. Invalid election
parameter values, e.g. non-existent election IDs and non-integer parameters,
will return an error message.
View URL: '/results/export'
"""
@classmethod
def setUpTestData(cls):
batch_num = 0
section_num = 0
voter_num = 0
party_num = 0
position_num = 0
candidate_num = 0
num_elections = 2
voters = list()
positions = dict()
for i in range(num_elections):
election = Election.objects.create(name='Election {}'.format(i))
positions[str(election.name)] = list()
num_batches = 2
for j in range(num_batches):
batch = Batch.objects.create(year=batch_num, election=election)
batch_num += 1
num_sections = 2 if j == 0 else 1
for k in range(num_sections):
section = Section.objects.create(
section_name=str(section_num)
)
section_num += 1
num_students = 2
for l in range(num_students):
voter = User.objects.create(
username='user{}'.format(voter_num),
first_name=str(voter_num),
last_name=str(voter_num),
type=UserType.VOTER
)
voter.set_password('voter')
voter.save()
voter_num += 1
VoterProfile.objects.create(
user=voter,
batch=batch,
section=section
)
voters.append(voter)
num_positions = 3
for i in range(num_positions):
position = CandidatePosition.objects.create(
position_name='Position {}'.format(position_num),
election=election
)
positions[str(election.name)].append(position)
position_num += 1
num_parties = 3
for j in range(num_parties):
party = CandidateParty.objects.create(
party_name='Party {}'.format(party_num),
election=election
)
party_num += 1
if j != 2: # Let every third party have no candidates.
num_positions = 3
for k in range(num_positions):
position = positions[str(election.name)][k]
candidate = Candidate.objects.create(
user=voters[candidate_num],
party=party,
position=position,
election=election
)
Vote.objects.create(
user=voters[candidate_num],
candidate=candidate,
election=election
)
candidate_num += 1
# Let's give one candidate an additional vote to really make sure that
# we all got the correct number of votes.
Vote.objects.create(
user=voters[0],
# NOTE: The voter in voter[1] is a Position 1 candidate of
# Party 1, where the voter in voter[0] is a member.
candidate=Candidate.objects.get(user=voters[1]),
election=Election.objects.get(name='Election 0')
)
_admin = User.objects.create(username='admin', type=UserType.ADMIN)
_admin.set_password('root')
_admin.save()
def setUp(self):
self.client.login(username='admin', password='root')
def test_anonymous_get_requests_redirected_to_index(self):
self.client.logout()
response = self.client.get(reverse('results-export'), follow=True)
self.assertRedirects(response, '/?next=%2Fadmin%2Fresults')
def test_voter_get_requests_redirected_to_index(self):
self.client.logout()
self.client.login(username='user0', password='voter')
response = self.client.get(reverse('results-export'), follow=True)
self.assertRedirects(response, reverse('index'))
def test_get_all_elections_xlsx(self):
response = self.client.get(reverse('results-export'))
self.assertEqual(response.status_code, 200)
self.assertEqual(
response['Content-Disposition'],
'attachment; filename="Election Results.xlsx"'
)
wb = openpyxl.load_workbook(io.BytesIO(response.content))
self.assertEqual(len(wb.worksheets), 2)
# Check first worksheet.
ws = wb.worksheets[0]
self.assertEqual(wb.sheetnames[0], 'Election 0')
row_count = ws.max_row
col_count = ws.max_column
self.assertEqual(row_count, 25)
self.assertEqual(col_count, 5)
self.assertEqual(str(ws.cell(1, 1).value), 'Election 0 Results')
self.assertEqual(str(ws.cell(2, 1).value), 'Candidates')
cellContents = [
'Position 0',
'Party 0',
'0, 0',
'Party 1',
'3, 3',
'Party 2',
'None',
'Position 1',
'Party 0',
'1, 1',
'Party 1',
'4, 4',
'Party 2',
'None',
'Position 2',
'Party 0',
'2, 2',
'Party 1',
'5, 5',
'Party 2',
'None'
]
for cellIndex, content in enumerate(cellContents, 5):
self.assertEqual(str(ws.cell(cellIndex, 1).value), content)
self.assertEqual(str(ws.cell(2, 2).value), 'Number of Votes')
self.assertEqual(str(ws.cell(3, 2).value), '0')
self.assertEqual(str(ws.cell(4, 2).value), '0') # Section
self.assertEqual(str(ws.cell(7, 2).value), '1')
self.assertEqual(str(ws.cell(9, 2).value), '0')
self.assertEqual(str(ws.cell(11, 2).value), 'N/A')
self.assertEqual(str(ws.cell(14, 2).value), '2')
self.assertEqual(str(ws.cell(16, 2).value), '0')
self.assertEqual(str(ws.cell(18, 2).value), 'N/A')
self.assertEqual(str(ws.cell(21, 2).value), '0')
self.assertEqual(str(ws.cell(23, 2).value), '0')
self.assertEqual(str(ws.cell(25, 2).value), 'N/A')
self.assertEqual(str(ws.cell(4, 3).value), '1') # Section
self.assertEqual(str(ws.cell(7, 3).value), '0')
self.assertEqual(str(ws.cell(9, 3).value), '1')
self.assertEqual(str(ws.cell(11, 2).value), 'N/A')
self.assertEqual(str(ws.cell(14, 3).value), '0')
self.assertEqual(str(ws.cell(16, 3).value), '0')
self.assertEqual(str(ws.cell(18, 2).value), 'N/A')
self.assertEqual(str(ws.cell(21, 3).value), '1')
self.assertEqual(str(ws.cell(23, 3).value), '0')
self.assertEqual(str(ws.cell(25, 2).value), 'N/A')
self.assertEqual(str(ws.cell(3, 4).value), '1')
self.assertEqual(str(ws.cell(4, 4).value), '2') # Section
self.assertEqual(str(ws.cell(7, 4).value), '0')
self.assertEqual(str(ws.cell(9, 4).value), '0')
self.assertEqual(str(ws.cell(11, 2).value), 'N/A')
self.assertEqual(str(ws.cell(14, 4).value), '0')
self.assertEqual(str(ws.cell(16, 4).value), '1')
self.assertEqual(str(ws.cell(18, 2).value), 'N/A')
self.assertEqual(str(ws.cell(21, 4).value), '0')
self.assertEqual(str(ws.cell(23, 4).value), '1')
self.assertEqual(str(ws.cell(25, 2).value), 'N/A')
self.assertEqual(str(ws.cell(3, 5).value), 'Total Votes')
self.assertEqual(str(ws.cell(7, 5).value), '1')
self.assertEqual(str(ws.cell(9, 5).value), '1')
self.assertEqual(str(ws.cell(11, 2).value), 'N/A')
self.assertEqual(str(ws.cell(14, 5).value), '2')
self.assertEqual(str(ws.cell(16, 5).value), '1')
self.assertEqual(str(ws.cell(18, 2).value), 'N/A')
self.assertEqual(str(ws.cell(21, 5).value), '1')
self.assertEqual(str(ws.cell(23, 5).value), '1')
self.assertEqual(str(ws.cell(25, 2).value), 'N/A')
# Check second worksheet.
ws = wb.worksheets[1]
self.assertEqual(wb.sheetnames[1], 'Election 1')
row_count = ws.max_row
col_count = ws.max_column
self.assertEqual(row_count, 25)
self.assertEqual(col_count, 5)
self.assertEqual(str(ws.cell(1, 1).value), 'Election 1 Results')
self.assertEqual(str(ws.cell(2, 1).value), 'Candidates')
self.assertEqual(str(ws.cell(2, 1).value), 'Candidates')
cellContents = [
'Position 3',
'Party 3',
'6, 6',
'Party 4',
'9, 9',
'Party 5',
'None',
'Position 4',
'Party 3',
'7, 7',
'Party 4',
'10, 10',
'Party 5',
'None',
'Position 5',
'Party 3',
'8, 8',
'Party 4',
'11, 11',
'Party 5',
'None'
]
for cellIndex, content in enumerate(cellContents, 5):
self.assertEqual(str(ws.cell(cellIndex, 1).value), content)
self.assertEqual(str(ws.cell(2, 2).value), 'Number of Votes')
self.assertEqual(str(ws.cell(3, 2).value), '2')
self.assertEqual(str(ws.cell(4, 2).value), '3') # Section
self.assertEqual(str(ws.cell(7, 2).value), '1')
self.assertEqual(str(ws.cell(9, 2).value), '0')
self.assertEqual(str(ws.cell(11, 2).value), 'N/A')
self.assertEqual(str(ws.cell(14, 2).value), '1')
self.assertEqual(str(ws.cell(16, 2).value), '0')
self.assertEqual(str(ws.cell(18, 2).value), 'N/A')
self.assertEqual(str(ws.cell(21, 2).value), '0')
self.assertEqual(str(ws.cell(23, 2).value), '0')
self.assertEqual(str(ws.cell(25, 2).value), 'N/A')
self.assertEqual(str(ws.cell(4, 3).value), '4') # Section
self.assertEqual(str(ws.cell(7, 3).value), '0')
self.assertEqual(str(ws.cell(9, 3).value), '1')
self.assertEqual(str(ws.cell(11, 2).value), 'N/A')
self.assertEqual(str(ws.cell(14, 3).value), '0')
self.assertEqual(str(ws.cell(16, 3).value), '0')
self.assertEqual(str(ws.cell(18, 2).value), 'N/A')
self.assertEqual(str(ws.cell(21, 3).value), '1')
self.assertEqual(str(ws.cell(23, 3).value), '0')
self.assertEqual(str(ws.cell(25, 2).value), 'N/A')
self.assertEqual(str(ws.cell(3, 4).value), '3')
self.assertEqual(str(ws.cell(4, 4).value), '5') # Section
self.assertEqual(str(ws.cell(7, 4).value), '0')
self.assertEqual(str(ws.cell(9, 4).value), '0')
self.assertEqual(str(ws.cell(11, 2).value), 'N/A')
self.assertEqual(str(ws.cell(14, 4).value), '0')
self.assertEqual(str(ws.cell(16, 4).value), '1')
self.assertEqual(str(ws.cell(18, 2).value), 'N/A')
self.assertEqual(str(ws.cell(21, 4).value), '0')
self.assertEqual(str(ws.cell(23, 4).value), '1')
self.assertEqual(str(ws.cell(25, 2).value), 'N/A')
self.assertEqual(str(ws.cell(3, 5).value), 'Total Votes')
self.assertEqual(str(ws.cell(7, 5).value), '1')
self.assertEqual(str(ws.cell(9, 5).value), '1')
self.assertEqual(str(ws.cell(11, 2).value), 'N/A')
self.assertEqual(str(ws.cell(14, 5).value), '1')
self.assertEqual(str(ws.cell(16, 5).value), '1')
self.assertEqual(str(ws.cell(18, 2).value), 'N/A')
self.assertEqual(str(ws.cell(21, 5).value), '1')
self.assertEqual(str(ws.cell(23, 5).value), '1')
self.assertEqual(str(ws.cell(25, 2).value), 'N/A')
def test_get_election0_xlsx(self):
response = self.client.get(
reverse('results-export'),
{ 'election': str(Election.objects.get(name='Election 0').id) }
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response['Content-Disposition'],
'attachment; filename="Election 0 Results.xlsx"'
)
wb = openpyxl.load_workbook(io.BytesIO(response.content))
self.assertEqual(len(wb.worksheets), 1)
# Check first worksheet.
ws = wb.worksheets[0]
self.assertEqual(wb.sheetnames[0], 'Election 0')
row_count = ws.max_row
col_count = ws.max_column
self.assertEqual(row_count, 25)
self.assertEqual(col_count, 5)
self.assertEqual(str(ws.cell(1, 1).value), 'Election 0 Results')
self.assertEqual(str(ws.cell(2, 1).value), 'Candidates')
cellContents = [
'Position 0',
'Party 0',
'0, 0',
'Party 1',
'3, 3',
'Party 2',
'None',
'Position 1',
'Party 0',
'1, 1',
'Party 1',
'4, 4',
'Party 2',
'None',
'Position 2',
'Party 0',
'2, 2',
'Party 1',
'5, 5',
'Party 2',
'None'
]
for cellIndex, content in enumerate(cellContents, 5):
self.assertEqual(str(ws.cell(cellIndex, 1).value), content)
self.assertEqual(str(ws.cell(2, 2).value), 'Number of Votes')
self.assertEqual(str(ws.cell(3, 2).value), '0')
self.assertEqual(str(ws.cell(4, 2).value), '0') # Section
self.assertEqual(str(ws.cell(7, 2).value), '1')
self.assertEqual(str(ws.cell(9, 2).value), '0')
self.assertEqual(str(ws.cell(11, 2).value), 'N/A')
self.assertEqual(str(ws.cell(14, 2).value), '2')
self.assertEqual(str(ws.cell(16, 2).value), '0')
self.assertEqual(str(ws.cell(18, 2).value), 'N/A')
self.assertEqual(str(ws.cell(21, 2).value), '0')
self.assertEqual(str(ws.cell(23, 2).value), '0')
self.assertEqual(str(ws.cell(25, 2).value), 'N/A')
self.assertEqual(str(ws.cell(4, 3).value), '1') # Section
self.assertEqual(str(ws.cell(7, 3).value), '0')
self.assertEqual(str(ws.cell(9, 3).value), '1')
self.assertEqual(str(ws.cell(11, 2).value), 'N/A')
self.assertEqual(str(ws.cell(14, 3).value), '0')
self.assertEqual(str(ws.cell(16, 3).value), '0')
self.assertEqual(str(ws.cell(18, 2).value), 'N/A')
self.assertEqual(str(ws.cell(21, 3).value), '1')
self.assertEqual(str(ws.cell(23, 3).value), '0')
self.assertEqual(str(ws.cell(25, 2).value), 'N/A')
self.assertEqual(str(ws.cell(3, 4).value), '1')
self.assertEqual(str(ws.cell(4, 4).value), '2') # Section
self.assertEqual(str(ws.cell(7, 4).value), '0')
self.assertEqual(str(ws.cell(9, 4).value), '0')
self.assertEqual(str(ws.cell(11, 2).value), 'N/A')
self.assertEqual(str(ws.cell(14, 4).value), '0')
self.assertEqual(str(ws.cell(16, 4).value), '1')
self.assertEqual(str(ws.cell(18, 2).value), 'N/A')
self.assertEqual(str(ws.cell(21, 4).value), '0')
self.assertEqual(str(ws.cell(23, 4).value), '1')
self.assertEqual(str(ws.cell(25, 2).value), 'N/A')
self.assertEqual(str(ws.cell(3, 5).value), 'Total Votes')
self.assertEqual(str(ws.cell(7, 5).value), '1')
self.assertEqual(str(ws.cell(9, 5).value), '1')
self.assertEqual(str(ws.cell(11, 2).value), 'N/A')
self.assertEqual(str(ws.cell(14, 5).value), '2')
self.assertEqual(str(ws.cell(16, 5).value), '1')
self.assertEqual(str(ws.cell(18, 2).value), 'N/A')
self.assertEqual(str(ws.cell(21, 5).value), '1')
self.assertEqual(str(ws.cell(23, 5).value), '1')
self.assertEqual(str(ws.cell(25, 2).value), 'N/A')
def test_get_with_invalid_election_id_non_existent_election_id(self):
response = self.client.get(
reverse('results-export'),
{ 'election': '69' },
HTTP_REFERER=reverse('results'),
follow=True
)
messages = list(response.context['messages'])
self.assertEqual(
messages[0].message,
'You specified an ID for a non-existent election.'
)
self.assertRedirects(response, reverse('results'))
def test_get_with_invalid_election_id_non_integer_election_id(self):
response = self.client.get(
reverse('results-export'),
{ 'election': 'hey' },
HTTP_REFERER=reverse('results'),
follow=True
)
messages = list(response.context['messages'])
self.assertEqual(
messages[0].message,
'You specified a non-integer election ID.'
)
self.assertRedirects(response, reverse('results'))
def test_ref_get_with_invalid_election_id_non_existent_election_id(self):
response = self.client.get(
reverse('results-export'),
{ 'election': '69' },
HTTP_REFERER=reverse('results'),
follow=True
)
messages = list(response.context['messages'])
self.assertEqual(
messages[0].message,
'You specified an ID for a non-existent election.'
)
self.assertRedirects(response, reverse('results'))
def test_ref_get_with_invalid_election_id_non_integer_election_id(self):
response = self.client.get(
reverse('results-export'),
{ 'election': 'hey' },
HTTP_REFERER=reverse('results'),
follow=True
)
messages = list(response.context['messages'])
self.assertEqual(
messages[0].message,
'You specified a non-integer election ID.'
)
self.assertRedirects(response, reverse('results'))
| seanballais/botos | tests/test_results_exporter_view.py | Python | gpl-3.0 | 18,942 |
import os

from mercurial import ui, hg, cmdutil

# NOTE: this is a web2py controller; names such as request, response, session,
# apath, FORM, INPUT, IS_NOT_EMPTY, TABLE and TR are injected by the framework.
_hgignore_content = """\
syntax: glob
*~
*.pyc
*.pyo
*.bak
cache/*
databases/*
sessions/*
errors/*
"""
def commit():
app = request.args[0]
path = apath(app, r=request)
uio = ui.ui()
uio.quiet = True
if not os.environ.get('HGUSER') and not uio.config("ui", "username"):
os.environ['HGUSER'] = 'web2py@localhost'
try:
r = hg.repository(ui=uio, path=path)
except:
r = hg.repository(ui=uio, path=path, create=True)
hgignore = os.path.join(path, '.hgignore')
if not os.path.exists(hgignore):
open(hgignore, 'w').write(_hgignore_content)
form = FORM('Comment:',INPUT(_name='comment',requires=IS_NOT_EMPTY()),
INPUT(_type='submit',_value='Commit'))
if form.accepts(request.vars,session):
oldid = r[r.lookup('.')]
cmdutil.addremove(r)
r.commit(text=form.vars.comment)
if r[r.lookup('.')] == oldid:
response.flash = 'no changes'
files = r[r.lookup('.')].files()
return dict(form=form,files=TABLE(*[TR(file) for file in files]),repo=r)
| henkelis/sonospy | web2py/applications/admin/controllers/mercurial.py | Python | gpl-3.0 | 1,107 |
# coding=utf-8
"""InaSAFE Disaster risk tool by Australian Aid - Flood Raster Impact on
Population.
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Rizky Maulana Nugraha'
from safe.common.utilities import OrderedDict
from safe.defaults import (
default_minimum_needs,
default_gender_postprocessor,
age_postprocessor,
minimum_needs_selector)
from safe.impact_functions.impact_function_metadata import \
ImpactFunctionMetadata
from safe.utilities.i18n import tr
from safe.definitions import (
layer_mode_continuous,
layer_geometry_raster,
hazard_flood,
hazard_category_single_event,
unit_metres,
unit_feet,
count_exposure_unit,
exposure_population
)
class FloodEvacuationRasterHazardMetadata(ImpactFunctionMetadata):
"""Metadata for FloodEvacuationFunction.
.. versionadded:: 2.1
We only need to re-implement as_dict(), all other behaviours
are inherited from the abstract base class.
"""
@staticmethod
def as_dict():
"""Return metadata as a dictionary.
This is a static method. You can use it to get the metadata in
dictionary format for an impact function.
:returns: A dictionary representing all the metadata for the
concrete impact function.
:rtype: dict
"""
dict_meta = {
'id': 'FloodEvacuationRasterHazardFunction',
'name': tr('Raster flood on population'),
'impact': tr('Need evacuation'),
'title': tr('Need evacuation'),
'function_type': 'old-style',
'author': 'AIFDR',
'date_implemented': 'N/A',
'overview': tr(
'To assess the impacts of flood inundation in raster '
'format on population.'),
'detailed_description': tr(
'The population subject to inundation exceeding a '
'threshold (default 1m) is calculated and returned as a '
'raster layer. In addition the total number of affected '
'people and the required needs based on the user '
'defined minimum needs are reported. The threshold can be '
'changed and even contain multiple numbers in which case '
'evacuation and needs are calculated using the largest number '
'with population breakdowns provided for the smaller numbers. '
'The population raster is resampled to the resolution of the '
'hazard raster and is rescaled so that the resampled '
'population counts reflect estimates of population count '
'per resampled cell. The resulting impact layer has the '
'same resolution and reflects population count per cell '
'which are affected by inundation.'),
'hazard_input': tr(
'A hazard raster layer where each cell represents flood '
'depth (in meters).'),
'exposure_input': tr(
'An exposure raster layer where each cell represent '
'population count.'),
'output': tr(
'Raster layer contains people affected and the minimum '
'needs based on the people affected.'),
'actions': tr(
'Provide details about how many people would likely need '
'to be evacuated, where they are located and what '
'resources would be required to support them.'),
'limitations': [
tr('The default threshold of 1 meter was selected based '
'on consensus, not hard evidence.')
],
'citations': [],
'layer_requirements': {
'hazard': {
'layer_mode': layer_mode_continuous,
'layer_geometries': [layer_geometry_raster],
'hazard_categories': [hazard_category_single_event],
'hazard_types': [hazard_flood],
'continuous_hazard_units': [unit_feet, unit_metres],
'vector_hazard_classifications': [],
'raster_hazard_classifications': [],
'additional_keywords': []
},
'exposure': {
'layer_mode': layer_mode_continuous,
'layer_geometries': [layer_geometry_raster],
'exposure_types': [exposure_population],
'exposure_units': [count_exposure_unit],
'exposure_class_fields': [],
'additional_keywords': []
}
},
'parameters': OrderedDict([
('thresholds [m]', [1.0]),
('postprocessors', OrderedDict([
('Gender', default_gender_postprocessor()),
('Age', age_postprocessor()),
('MinimumNeeds', minimum_needs_selector()),
])),
('minimum needs', default_minimum_needs())
])
}
return dict_meta
| wonder-sk/inasafe | safe/impact_functions/inundation/flood_raster_population/metadata_definitions.py | Python | gpl-3.0 | 5,408 |
# -*- coding: utf-8 -*-
from django.shortcuts import render
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
def centres(request):
    # Python practice project management center
return render(request, 'centres/centres.html')
def upload(request):
    # file upload page
return render(request, 'centres/upload.html')
def uploadfile(request):
import os
if request.method == "POST": # 请求方法为POST时,进行处理
myFile =request.FILES.get("myfile", None) # 获取上传的文件,如果没有文件,则默认为None
if not myFile:
#return HttpResponse("no files for upload!")
return render(request, 'centres/upload.html',{'what':'no file for upload!'})
upfile = open(os.path.join("D:\\xHome\\data\\upload",myFile.name),'wb+') # 打开特定的文件进行二进制的写操作
for chunk in myFile.chunks(): # 分块写入文件
upfile.write(chunk)
upfile.close()
#return HttpResponse("upload over!")
return render(request, 'centres/upload.html', {'what':'upload over!'}) | xBoye/xHome | centres/views.py | Python | gpl-3.0 | 1,162 |
import cv2
import numpy as np
np.set_printoptions(threshold=np.nan)
import util
import edge_detect
import lineseg
import drawedgelist
# img = cv2.imread("img/Slide2.jpg", 0)
img = cv2.imread("unsorted/Unit Tests/lambda.png", 0)
im_size = img.shape
returnedCanny = cv2.Canny(img, 50, 150, apertureSize = 3)
cv2.imshow("newcanny", returnedCanny)
skel_dst = util.morpho(returnedCanny)
out = edge_detect.mask_contours(edge_detect.create_img(skel_dst))
res = []
# print(np.squeeze(out[0]))
# print(out[0][0])
for i in range(len(out)):
# Add the first point to the end so the shape closes
current = np.squeeze(out[i])
# print('current', current)
# print('first', out[i][0])
if current.shape[0] > 2:
# res.append(np.concatenate((current, out[i][0])))
# print(res[-1])
res.append(current)
# print(np.concatenate((np.squeeze(out[i]), out[i][0])))
res = np.array(res)
util.sqz_contours(res)
res = lineseg.lineseg(np.array([res[1]]), tol=5)
print(res, "res")
"""
for x in range(len(res)):
for y in range(lan ):
"""
drawedgelist.drawedgelist(res, img)
"""
seglist = []
for i in range(res.shape[0]):
# print('shape', res[i].shape)
if res[i].shape[0] > 2:
# print(res[i])
# print(res[i][0])
seglist.append(np.concatenate((res[i], [res[i][0]])))
else:
seglist.append(res[i])
seglist = np.array(seglist)
"""
#print(seglist, "seglist")
#print(len(seglist), "seglist len")
#print(seglist.shape, "seglistshape")
#drawedgelist.drawedgelist(seglist)
"""
# ******* SECTION 2 *******
# SEGMENT AND LABEL THE CURVATURE LINES (CONVEX/CONCAVE).
LineFeature, ListPoint = Lseg_to_Lfeat_v4.create_linefeatures(seglist, res, im_size)
Line_new, ListPoint_new, line_merged = merge_lines_v4.merge_lines(LineFeature, ListPoint, 10, im_size)
#print(Line_new, "line new")
print(len(Line_new), "len line new")
util.draw_lf(Line_new, blank_image)
line_newC = LabelLineCurveFeature_v4.classify_curves(img, Line_new, ListPoint_new, 11)"""

| Jordan-Zhu/RoboVision | scratch/testingColorImg.py | Python | gpl-3.0 | 2,023 |
#!/usr/bin/env python
# setup of the grid parameters
# default queue used for training
training_queue = { 'queue':'q1dm', 'memfree':'16G', 'pe_opt':'pe_mth 2', 'hvmem':'8G', 'io_big':True }
# the queue that is used solely for the final ISV training step
isv_training_queue = { 'queue':'q1wm', 'memfree':'32G', 'pe_opt':'pe_mth 4', 'hvmem':'8G' }
# number of audio files that one job should preprocess
number_of_audio_files_per_job = 1000
preprocessing_queue = {}
# number of features that one job should extract
number_of_features_per_job = 600
extraction_queue = { 'queue':'q1d', 'memfree':'8G' }
# number of features that one job should project
number_of_projections_per_job = 600
projection_queue = { 'queue':'q1d', 'hvmem':'8G', 'memfree':'8G' }
# number of models that one job should enroll
number_of_models_per_enrol_job = 20
enrol_queue = { 'queue':'q1d', 'memfree':'4G', 'io_big':True }
# number of models that one score job should process
number_of_models_per_score_job = 20
score_queue = { 'queue':'q1d', 'memfree':'4G', 'io_big':True }
grid_type = 'local' # run locally instead of on the Idiap SGE grid
| guker/spear | config/grid/para_training_local.py | Python | gpl-3.0 | 1,092 |
#!/usr/bin/python2
# -*- coding: utf-8 -*-
# coding=utf-8
import unittest
from datetime import datetime
from lib.escala import Escala
import dirs
dirs.DEFAULT_DIR = dirs.TestDir()
class FrameTest(unittest.TestCase):
def setUp(self):
self.escala = Escala('fixtures/escala.xml')
self.dir = dirs.TestDir()
self.maxDiff = None
def tearDown(self):
pass
def test_attributos_voo_1(self):
p_voo = self.escala.escalas[0]
self.assertEqual(p_voo.activity_date, datetime(2013, 3, 1, 11, 36))
self.assertEqual(p_voo.present_location, 'VCP')
self.assertEqual(p_voo.flight_no, '4148')
self.assertEqual(p_voo.origin, 'VCP')
self.assertEqual(p_voo.destination, 'GYN')
self.assertEqual(p_voo.actype, 'E95')
self.assertTrue(p_voo.checkin)
self.assertEqual(p_voo.checkin_time, datetime(2013, 3, 1, 10, 36))
self.assertEqual(p_voo.std, datetime(2013, 3, 1, 13, 13))
self.assertEqual(p_voo.sta, datetime(2013, 3, 1, 11, 36))
self.assertEqual(p_voo.activity_info, 'AD4148')
self.assertFalse(p_voo.duty_design)
def test_attributos_voo_17(self):
p_voo = self.escala.escalas[17]
self.assertEqual(p_voo.activity_date, datetime(2013, 10, 28, 3, 0))
self.assertEqual(p_voo.present_location, 'VCP')
self.assertEqual(p_voo.flight_no, None)
self.assertEqual(p_voo.origin, 'VCP')
self.assertEqual(p_voo.destination, 'VCP')
self.assertEqual(p_voo.activity_info, 'P04')
self.assertEqual(p_voo.actype, None)
self.assertEqual(p_voo.sta, datetime(2013, 10, 28, 3, 0))
self.assertEqual(p_voo.std, datetime(2013, 10, 28, 15, 0))
self.assertFalse(p_voo.checkin)
self.assertEqual(p_voo.checkin_time, None)
self.assertFalse(p_voo.duty_design)
def test_attributos_voo_18(self):
p_voo = self.escala.escalas[18]
self.assertEqual(p_voo.activity_date, datetime(2013, 10, 29, 4, 58))
self.assertEqual(p_voo.present_location, 'VCP')
self.assertEqual(p_voo.flight_no, '4050')
self.assertEqual(p_voo.origin, 'VCP')
self.assertEqual(p_voo.destination, 'FLN')
self.assertEqual(p_voo.activity_info, 'AD4050')
self.assertEqual(p_voo.actype, 'E95')
self.assertEqual(p_voo.sta, datetime(2013, 10, 29, 4, 58))
self.assertEqual(p_voo.std, datetime(2013, 10, 29, 6, 15))
self.assertTrue(p_voo.checkin)
self.assertEqual(p_voo.checkin_time, datetime(2013, 10, 29, 5, 8))
self.assertFalse(p_voo.duty_design)
self.assertEqual(p_voo.horas_de_voo, '1:17')
def test_attributos_quarto_voo(self):
p_voo = self.escala.escalas[25]
self.assertFalse(p_voo.checkin)
self.assertEqual(p_voo.checkin_time, None)
self.assertEqual(p_voo.flight_no, '2872')
self.assertEqual(p_voo.activity_info, 'AD2872')
def test_calculo_horas_voadas(self):
s_horas = {
'h_diurno': '6:40',
'h_noturno': '6:47',
'h_total_voo': '13:27',
'h_faixa2': '0:00',
'h_sobreaviso': '40:00',
'h_reserva': '29:13'
}
self.assertEqual(self.escala.soma_horas(), s_horas)
def test_ics(self):
"""
Check ICS output
"""
escala = Escala('fixtures/escala_ics.xml')
f_result = open(self.dir.get_data_dir() + 'fixtures/escala.ics')
self.assertEqual(escala.ics(), f_result.read())
f_result.close()
def test_csv(self):
"""
Check CSV output
"""
f_result = open(self.dir.get_data_dir() + 'fixtures/escala.csv')
self.assertEqual(self.escala.csv(), f_result.read())
f_result.close()
def main():
unittest.main()
if __name__ == '__main__':
main()
| camponez/importescala | test/test_escala.py | Python | gpl-3.0 | 3,876 |
from test_support import *
# this test calls a prover which is correctly configured but whose execution
# gives an error (here: the prover executable doesn't exist). The intent is to
# test the output of gnatprove in this specific case
prove_all(prover=["plop"], opt=["--why3-conf=test.conf"])
| ptroja/spark2014 | testsuite/gnatprove/tests/N804-036__bad_prover/test.py | Python | gpl-3.0 | 296 |
from itertools import combinations
def is_good(n):
return 1 + ((int(n) - 1) % 9) == 9
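# Illustrative note: 1 + ((n - 1) % 9) is the digital root of n, so
# is_good(18) is True (digital root 9) while is_good(17) is False (root 8).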
def generate_subsequences(n):
subsequences = []
combinations_list = []
index = 4
    # generate all combinations (subsequences) of length 4 down to 1
while index > 0:
combinations_list.append(list(combinations(str(n), index)))
index -= 1
#Formatting combinations
for index in combinations_list:
for combination in index:
subsequences.append(''.join(combination))
return subsequences
if __name__ == '__main__':
#The modulo
modulo = ((10 ** 9) + 7)
#Get number of cases
cases = int(raw_input())
while cases > 0:
value = raw_input()
good_subsequences = 0
for sub in generate_subsequences(value):
if is_good(sub):
good_subsequences += 1
print (good_subsequences % modulo)-1
cases -= 1
| Dawny33/Code | HackerEarth/BeCoder 2/nine.py | Python | gpl-3.0 | 882 |
#coding=utf-8
import unittest
import HTMLTestRunner
import time
from config import globalparam
from public.common import sendmail
def run():
test_dir = './testcase'
suite = unittest.defaultTestLoader.discover(start_dir=test_dir,pattern='test*.py')
now = time.strftime('%Y-%m-%d_%H_%M_%S')
reportname = globalparam.report_path + '\\' + 'TestResult' + now + '.html'
with open(reportname,'wb') as f:
runner = HTMLTestRunner.HTMLTestRunner(
stream=f,
            title='Test Report',
description='Test the import testcase'
)
runner.run(suite)
time.sleep(3)
    # send the report by email
mail = sendmail.SendMail()
mail.send()
if __name__=='__main__':
    run()

| lsp84ch83/PyText | UItestframework/run.py | Python | gpl-3.0 | 730 |
# -*- coding: utf-8 -*-
from pyload.plugin.internal.DeadCrypter import DeadCrypter
class FiredriveCom(DeadCrypter):
__name = "FiredriveCom"
__type = "crypter"
__version = "0.03"
__pattern = r'https?://(?:www\.)?(firedrive|putlocker)\.com/share/.+'
__config = [] #@TODO: Remove in 0.4.10
__description = """Firedrive.com folder decrypter plugin"""
__license = "GPLv3"
__authors = [("Walter Purcaro", "vuolter@gmail.com")]
| ardi69/pyload-0.4.10 | pyload/plugin/crypter/FiredriveCom.py | Python | gpl-3.0 | 474 |
import queue
import logging
import platform
import threading
import datetime as dt
import serial
import serial.threaded
import serial_device
from .or_event import OrEvent
logger = logging.getLogger(__name__)
# Flag to indicate whether queues should be polled.
# XXX Note that polling performance may vary by platform.
POLL_QUEUES = (platform.system() == 'Windows')
class EventProtocol(serial.threaded.Protocol):
def __init__(self):
self.transport = None
self.connected = threading.Event()
self.disconnected = threading.Event()
self.port = None
def connection_made(self, transport):
"""Called when reader thread is started"""
self.port = transport.serial.port
logger.debug('connection_made: `%s` `%s`', self.port, transport)
self.transport = transport
self.connected.set()
self.disconnected.clear()
def data_received(self, data):
"""Called with snippets received from the serial port"""
raise NotImplementedError
def connection_lost(self, exception):
"""\
Called when the serial port is closed or the reader loop terminated
otherwise.
"""
if isinstance(exception, Exception):
logger.debug('Connection to port `%s` lost: %s', self.port,
exception)
else:
logger.debug('Connection to port `%s` closed', self.port)
self.connected.clear()
self.disconnected.set()
class KeepAliveReader(threading.Thread):
'''
Keep a serial connection alive (as much as possible).
Parameters
----------
    protocol_class : serial.threaded.Protocol subclass
        Protocol class to instantiate for the reader thread.
comport : str
Name of com port to connect to.
default_timeout_s : float, optional
Default time to wait for serial operation (e.g., connect).
By default, block (i.e., no time out).
**kwargs
Keyword arguments passed to ``serial_for_url`` function, e.g.,
``baudrate``, etc.
'''
def __init__(self, protocol_class, comport, **kwargs):
super(KeepAliveReader, self).__init__()
self.daemon = True
self.protocol_class = protocol_class
self.comport = comport
self.kwargs = kwargs
self.protocol = None
self.default_timeout_s = kwargs.pop('default_timeout_s', None)
# Event to indicate serial connection has been established.
self.connected = threading.Event()
# Event to request a break from the run loop.
self.close_request = threading.Event()
# Event to indicate thread has been closed.
self.closed = threading.Event()
# Event to indicate an exception has occurred.
self.error = threading.Event()
# Event to indicate that the thread has connected to the specified port
# **at least once**.
self.has_connected = threading.Event()
@property
def alive(self):
return not self.closed.is_set()
def run(self):
# Verify requested serial port is available.
try:
if self.comport not in (serial_device
.comports(only_available=True).index):
raise NameError('Port `%s` not available. Available ports: '
'`%s`' % (self.comport,
', '.join(serial_device.comports()
.index)))
except NameError as exception:
self.error.exception = exception
self.error.set()
self.closed.set()
return
while True:
# Wait for requested serial port to become available.
while self.comport not in (serial_device
.comports(only_available=True).index):
# Assume serial port was disconnected temporarily. Wait and
# periodically check again.
self.close_request.wait(2)
if self.close_request.is_set():
# No connection is open, so nothing to close. Just quit.
self.closed.set()
return
try:
# Try to open serial device and monitor connection status.
logger.debug('Open `%s` and monitor connection status',
self.comport)
device = serial.serial_for_url(self.comport, **self.kwargs)
except serial.SerialException as exception:
self.error.exception = exception
self.error.set()
self.closed.set()
return
except Exception as exception:
self.error.exception = exception
self.error.set()
self.closed.set()
return
else:
with serial.threaded.ReaderThread(device, self
.protocol_class) as protocol:
self.protocol = protocol
connected_event = OrEvent(protocol.connected,
self.close_request)
disconnected_event = OrEvent(protocol.disconnected,
self.close_request)
# Wait for connection.
connected_event.wait(None if self.has_connected.is_set()
else self.default_timeout_s)
if self.close_request.is_set():
# Quit run loop. Serial connection will be closed by
# `ReaderThread` context manager.
self.closed.set()
return
self.connected.set()
self.has_connected.set()
# Wait for disconnection.
disconnected_event.wait()
if self.close_request.is_set():
# Quit run loop.
self.closed.set()
return
self.connected.clear()
# Loop to try to reconnect to serial device.
def write(self, data, timeout_s=None):
'''
Write to serial port.
Waits for serial connection to be established before writing.
Parameters
----------
data : str or bytes
Data to write to serial port.
timeout_s : float, optional
Maximum number of seconds to wait for serial connection to be
established.
By default, block until serial connection is ready.
'''
self.connected.wait(timeout_s)
self.protocol.transport.write(data)
def request(self, response_queue, payload, timeout_s=None,
poll=POLL_QUEUES):
'''
        Send a request payload to the serial device and wait for the response.
        Parameters
        ----------
response_queue : Queue.Queue
Queue to wait for response on.
payload : str or bytes
Payload to send.
timeout_s : float, optional
Maximum time to wait (in seconds) for response.
By default, block until response is ready.
poll : bool, optional
If ``True``, poll response queue in a busy loop until response is
ready (or timeout occurs).
Polling is much more processor intensive, but (at least on Windows)
results in faster response processing. On Windows, polling is
enabled by default.
'''
self.connected.wait(timeout_s)
return request(self, response_queue, payload, timeout_s=timeout_s,
poll=poll)
def close(self):
self.close_request.set()
# - - context manager, returns protocol
def __enter__(self):
"""\
Enter context handler. May raise RuntimeError in case the connection
could not be created.
"""
self.start()
# Wait for protocol to connect.
event = OrEvent(self.connected, self.closed)
event.wait(self.default_timeout_s)
return self
def __exit__(self, *args):
"""Leave context: close port"""
self.close()
self.closed.wait()
def request(device, response_queue, payload, timeout_s=None, poll=POLL_QUEUES):
'''
Send payload to serial device and wait for response.
Parameters
----------
device : serial.Serial
Serial instance.
response_queue : Queue.Queue
Queue to wait for response on.
payload : str or bytes
Payload to send.
timeout_s : float, optional
Maximum time to wait (in seconds) for response.
By default, block until response is ready.
poll : bool, optional
If ``True``, poll response queue in a busy loop until response is
ready (or timeout occurs).
Polling is much more processor intensive, but (at least on Windows)
results in faster response processing. On Windows, polling is
enabled by default.
'''
device.write(payload)
if poll:
# Polling enabled. Wait for response in busy loop.
start = dt.datetime.now()
while not response_queue.qsize():
            # Only enforce the timeout when one was actually given.
            if (timeout_s is not None and
                    (dt.datetime.now() - start).total_seconds() > timeout_s):
                raise queue.Empty('No response received.')
return response_queue.get()
else:
# Polling disabled. Use blocking `Queue.get()` method to wait for
# response.
return response_queue.get(timeout=timeout_s)
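
# --- Illustrative usage (a sketch, not part of the original module) ---
# Assumes a line-oriented device on an invented port name; ``LineProtocol`` is
# a hypothetical EventProtocol subclass that queues whatever bytes arrive.
#
#     class LineProtocol(EventProtocol):
#         def __init__(self):
#             super(LineProtocol, self).__init__()
#             self.responses = queue.Queue()
#
#         def data_received(self, data):
#             self.responses.put(data)
#
#     with KeepAliveReader(LineProtocol, '/dev/ttyUSB0', baudrate=115200,
#                          default_timeout_s=5) as reader:
#         reply = reader.request(reader.protocol.responses, b'hello\n',
#                                timeout_s=2)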
| wheeler-microfluidics/serial_device | serial_device/threaded.py | Python | gpl-3.0 | 9,719 |
__author__ = "Harish Narayanan"
__copyright__ = "Copyright (C) 2009 Simula Research Laboratory and %s" % __author__
__license__ = "GNU GPL Version 3 or any later version"
from cbc.twist import *
from sys import argv
""" DEMO - Twisting of a hyperelastic cube """
class Twist(StaticHyperelasticity):
""" Definition of the hyperelastic problem """
def mesh(self):
n = 8
return UnitCubeMesh(n, n, n)
# Setting up dirichlet conditions and boundaries
def dirichlet_values(self):
clamp = Expression(("0.0", "0.0", "0.0"))
twist = Expression(("0.0",
"y0 + (x[1] - y0) * cos(theta) - (x[2] - z0) * sin(theta) - x[1]",
"z0 + (x[1] - y0) * sin(theta) + (x[2] - z0) * cos(theta) - x[2]"),
y0=0.5, z0=0.5, theta=pi/6)
return [clamp, twist]
def dirichlet_boundaries(self):
left = "x[0] == 0.0"
right = "x[0] == 1.0"
return [left, right]
# List of material models
def material_model(self):
# Material parameters can either be numbers or spatially
# varying fields. For example,
mu = 3.8461
lmbda = Expression("x[0]*5.8 + (1 - x[0])*5.7")
C10 = 0.171; C01 = 4.89e-3; C20 = -2.4e-4; C30 = 5.e-4
delka = 1.0/sqrt(2.0)
M = Constant((0.0,1.0,0.0))
k1 = 1e2; k2 = 1e1
materials = []
materials.append(MooneyRivlin({'C1':mu/2, 'C2':mu/2, 'bulk':lmbda}))
materials.append(StVenantKirchhoff({'mu':mu, 'bulk':lmbda}))
materials.append(neoHookean({'half_nkT':mu, 'bulk':lmbda}))
materials.append(Isihara({'C10':C10,'C01':C01,'C20':C20,'bulk':lmbda}))
materials.append(Biderman({'C10':C10,'C01':C01,'C20':C20,'C30':C30,'bulk':lmbda}))
materials.append(AnisoTest({'mu1':mu,'mu2':2*mu,'M':M,'bulk':lmbda}))
materials.append(GasserHolzapfelOgden({'mu':mu,'k1':k1,'k2':k2,'M':M,'bulk':lmbda}))
materials.append(Ogden({'alpha1':1.3,'alpha2':5.0,'alpha3':-2.0,\
'mu1':6.3e5,'mu2':0.012e5,'mu3':-0.1e5}))
try:
index = int(argv[1])
except:
index = 2
print str(materials[index])
return materials[index]
def name_method(self, method):
self.method = method
def __str__(self):
return "A hyperelastic cube twisted by 30 degrees solved by " + self.method
# Setup the problem
twist = Twist()
twist.name_method("DISPLACEMENT BASED FORMULATION")
# Solve the problem
print twist
twist.solve()
| hnarayanan/twist | demo/static/twist.py | Python | gpl-3.0 | 2,606 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# dépendances
import requests
import xml.dom.minidom
import sys
import signal
import os
import getopt
from queue import Queue
from threading import Thread
import time
class SetQueue(Queue):
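    """Queue variant that remembers every item ever put and drops duplicates."""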
def _init(self, maxsize):
Queue._init(self, maxsize)
self.all_items = set()
def _put(self, item):
if item not in self.all_items:
Queue._put(self, item)
self.all_items.add(item)
def signal_handler(signal, frame):
print('You pressed Ctrl+C!')
sys.exit(0)
def usage():
    """Print command-line usage."""
    print("usage : " + sys.argv[0] +
          " -h --help -s --server someurl.com -u --user login -p --password password -m --manager")
def getAtomFeed(url, login, pwd):
# var
MAX_TRY = 10
essai = 0
# get atom document
while essai < MAX_TRY:
try:
r = requests.get('http://' + url, auth=(login,pwd), timeout=10)
except:
essai += 1
continue
break
    else:
        # raising a plain string is invalid in Python 3
        raise RuntimeError('Request failed after %d attempts' % MAX_TRY)
# parse atom document
try:
dom = xml.dom.minidom.parseString(r.text)
    except:
        raise ValueError('Failed to parse the Atom document')
return dom
def getManagerInfo(atomFeed):
try:
entries = atomFeed.getElementsByTagName('entry')[1]
except:
return None
try:
managerId = entries.getElementsByTagName('snx:userid')[0]
return managerId.firstChild.data
except:
return None
def buildUrlSearchList(server, login, pwd, q):
# var
alphabet = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z']
#alphabet = ['a']
for i in alphabet:
url = server + '/profiles/atom/search.do?search=' + i + '*&ps=250'
dom = getAtomFeed(url, login, pwd)
totalResult = dom.getElementsByTagName('opensearch:totalResults')[0]
totalResult = int(totalResult.firstChild.data)
if totalResult > 250:
nbPage = int(float(totalResult) / 250) + 1
for n in range(1,nbPage,1):
item = url + "&page=" + str(n)
q.put(item)
else:
nbPage = 1
q.put(url)
def getUserIdsWorker(login, pwd, qin, qout):
while True:
url = qin.get()
if url == None:
break
qin.task_done()
try:
dom = getAtomFeed(url, login, pwd)
except:
continue
userIds = dom.getElementsByTagName('snx:userid')
for index, item, in enumerate(userIds):
qout.put(item.firstChild.data)
def getRelationsWorker(server, login, pwd, qin, qout, getManager, qmgmt):
while True:
userid = qin.get()
if userid == None:
break
qin.task_done()
url = server + '/profiles/atom/connections.do?userid=' + userid + '&connectionType=colleague&ps=250'
try:
dom = getAtomFeed(url, login, pwd)
except:
continue
feed = dom.firstChild
entries = feed.getElementsByTagName('entry')
for entry in entries:
# get date
dateRelation = entry.getElementsByTagName('updated')[0]
dateRelation = dateRelation.firstChild.data
dateRelation = dateRelation[:10]
# get author user id
author = entry.getElementsByTagName('author')[0]
try:
authorName = author.getElementsByTagName('name')[0]
authorName = authorName.firstChild.data
except:
authorName = ""
try:
authorEMail = author.getElementsByTagName('email')[0]
authorEMail = authorEMail.firstChild.data
except:
authorEMail = ""
authorUserId = author.getElementsByTagName('snx:userid')[0]
authorUserId = authorUserId.firstChild.data
# get contributor user id
contributor = entry.getElementsByTagName('contributor')[0]
try:
contribName = contributor.getElementsByTagName('name')[0]
contribName = contribName.firstChild.data
except:
contribName = ""
try:
contribEMail = contributor.getElementsByTagName('email')[0]
contribEMail = contribEMail.firstChild.data
except:
contribEMail = ""
contribUserId = contributor.getElementsByTagName('snx:userid')[0]
contribUserId = contribUserId.firstChild.data
# build dict
authorInfo = { "userid" : authorUserId, "name" : authorName, "email" : authorEMail }
contribInfo = { "userid" : contribUserId, "name" : contribName, "email" : contribEMail }
relation = "\"" + authorUserId + "\",\"" + contribUserId + "\",\"<(" + str(dateRelation) + ",Infinity)>\""
qout.put(authorInfo)
qout.put(contribInfo)
qout.put(relation)
# get manager
if getManager == True:
url = server + "/profiles/atom/reportingChain.do?userid=" + userid
rc = getAtomFeed(url, login, pwd)
managerId = getManagerInfo(rc)
if managerId is not None:
reportingChain = str(userid) + "," + str(managerId)
qmgmt.put(reportingChain)
def printStatusThread(q0, q1, q2, q3):
strtime = time.time()
while True:
sys.stdout.write('\r\x1b[K')
sys.stdout.write("urls:" + str(q0.qsize()) + " | ")
sys.stdout.write("userids:" + str(q1.qsize()) + " | ")
sys.stdout.write("user infos:" + str(q2.qsize()) + " | ")
sys.stdout.write("manager infos:" + str(q3.qsize()))
sys.stdout.flush()
time.sleep(1)
def writeFileThread(usersFilename, relationsFilename, qin):
# file for user details
u = open(usersFilename + ".csv", "w")
u.write("Id,Label,eMail\n")
# file for relations
r = open(relationsFilename + ".csv", "w")
r.write("Source,Target,Time Interval\n")
doneUsers = []
while True:
data = qin.get()
if data == None:
u.flush()
r.flush()
u.close()
r.close()
break
# write data
if type(data) is dict:
string = str(data["userid"]) + ',' + str(data["name"]) + ',' + str(data["email"])
if string not in doneUsers:
u.write(string + "\n")
doneUsers.append(string)
elif type(data) is str:
r.write(str(data) + "\n")
qin.task_done()
def writeManagerFileThread(managerFilename, qin):
m = open(managerFilename + ".csv", "w")
m.write("Source,Target\n")
while True:
data = qin.get()
if data == None:
break
m.write(str(data) + "\n")
qin.task_done()
def main(argv):
# global
serverUrl = ""
login = ""
pwd = ""
getManager = False
urlQueue = SetQueue(maxsize=5000)
userIdsQueue = SetQueue(maxsize=5000)
userInfosQueue = Queue(maxsize=5000)
userManagerQueue = Queue(maxsize=5000)
# signal handler
signal.signal(signal.SIGINT, signal_handler)
# retrive arguments
try:
opts, args = getopt.getopt(argv, "hs:u:p:m", ["help", "server=", "user=", "password=", "manager"])
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
elif opt in ("-s", "--server"):
serverUrl = arg
elif opt in ("-u", "--user"):
login = arg
elif opt in ("-p", "--password"):
pwd = arg
elif opt in ("-m", "--manager"):
getManager = True
except:
usage()
sys.exit()
# threading get userinfo worker
userIdWorker = []
for i in range(10):
w1 = Thread(target=getUserIdsWorker, args=(login, pwd, urlQueue, userIdsQueue,))
w1.setDaemon(True)
w1.start()
userIdWorker.append(w1)
# threading get relations worker
userInfoWorker = []
for i in range(20):
w2 = Thread(target=getRelationsWorker, args=(serverUrl, login, pwd, userIdsQueue, userInfosQueue, getManager, userManagerQueue,))
w2.setDaemon(True)
w2.start()
userInfoWorker.append(w2)
# thread to print size of queue
w3 = Thread(target=printStatusThread, args=(urlQueue, userIdsQueue, userInfosQueue, userManagerQueue,))
w3.setDaemon(True)
w3.start()
# thread to write files
w4 = Thread(target=writeFileThread, args=("users", "relations", userInfosQueue,))
w4.setDaemon(True)
w4.start()
if getManager == True:
w5 = Thread(target=writeManagerFileThread, args=("manager", userManagerQueue,))
w5.setDaemon(True)
w5.start()
# build Queue url list
MAX_TRY = 10
essai = 0
while essai < MAX_TRY:
try:
buildUrlSearchList(serverUrl, login, pwd, urlQueue)
except KeyboardInterrupt:
break
except:
essai += 1
continue
break
while not (urlQueue.empty() and userIdsQueue.empty() and userInfosQueue.empty()):
pass
print ("end threads")
urlQueue.put(None)
userIdsQueue.put(None)
userInfosQueue.put(None)
# end of workers
for i in userIdWorker:
i.join()
for i in userInfoWorker:
i.join()
time.sleep(5)
sys.exit(0)
if __name__ == '__main__':
main(sys.argv[1:])
| adalmieres/scriptsIBMConnections | IBMConnectionsSocialGraph.py | Python | gpl-3.0 | 8,111 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# pkpgcounter : a generic Page Description Language parser
#
# (c) 2003-2009 Jerome Alet <alet@librelogiciel.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# $Id$
#
#
import sys
import glob
import os
import shutil
try :
from distutils.core import setup
except ImportError as msg :
sys.stderr.write("%s\n" % msg)
sys.stderr.write("You need the DistUtils Python module.\nunder Debian, you may have to install the python-dev package.\nOf course, YMMV.\n")
sys.exit(-1)
try :
from PIL import Image
except ImportError :
sys.stderr.write("You need the Python Imaging Library (aka PIL).\nYou can grab it from http://www.pythonware.com\n")
sys.exit(-1)
sys.path.insert(0, "pkpgpdls")
from pkpgpdls.version import __version__, __doc__
data_files = []
mofiles = glob.glob(os.sep.join(["po", "*", "*.mo"]))
for mofile in mofiles :
lang = mofile.split(os.sep)[1]
directory = os.sep.join(["share", "locale", lang, "LC_MESSAGES"])
data_files.append((directory, [ mofile ]))
docdir = "share/doc/pkpgcounter"
docfiles = ["README", "COPYING", "BUGS", "CREDITS", "AUTHORS", "TODO"]
data_files.append((docdir, docfiles))
if os.path.exists("ChangeLog") :
data_files.append((docdir, ["ChangeLog"]))
directory = os.sep.join(["share", "man", "man1"])
manpages = glob.glob(os.sep.join(["man", "*.1"]))
data_files.append((directory, manpages))
setup(name = "pkpgcounter", version = __version__,
license = "GNU GPL",
description = __doc__,
author = "Jerome Alet",
author_email = "alet@librelogiciel.com",
url = "http://www.pykota.com/software/pkpgcounter/",
packages = [ "pkpgpdls" ],
scripts = [ "bin/pkpgcounter" ],
data_files = data_files)
| lynxis/pkpgcounter | setup.py | Python | gpl-3.0 | 2,361 |
#!/usr/bin/env python
"""This utility script was adopted from StackExchange:
http://stackoverflow.com/questions/12090503/listing-available-com-ports-with-python
Adopted for use with arduino_GC connection project
"""
import sys
import glob
import serial
def serial_ports():
""" Lists serial port names
:raises EnvironmentError:
On unsupported or unknown platforms
:returns:
A list of the serial ports available on the system
"""
if sys.platform.startswith('win'):
ports = ['COM%s' % (i + 1) for i in range(256)]
elif sys.platform.startswith('linux') or sys.platform.startswith('cygwin'):
# this excludes your current terminal "/dev/tty"
        ports = glob.glob('/dev/tty[A-Za-z]*')
elif sys.platform.startswith('darwin'):
ports = glob.glob('/dev/cu.*')
else:
raise EnvironmentError('Unsupported platform')
result = []
for port in ports:
try:
s = serial.Serial(port)
s.close()
result.append(port)
except (OSError, serial.SerialException):
pass
return result
if __name__ == '__main__':
print(serial_ports())
| Mobleyta/GasChromino | PythonCode/serial_ports.py | Python | gpl-3.0 | 1,189 |
def _setup_pkgresources():
import pkg_resources
import os
import plistlib
pl = plistlib.readPlist(os.path.join(
os.path.dirname(os.getenv('RESOURCEPATH')), "Info.plist"))
appname = pl.get('CFBundleIdentifier')
if appname is None:
appname = pl['CFBundleDisplayName']
path = os.path.expanduser('~/Library/Caches/%s/python-eggs' % (appname,))
pkg_resources.set_extraction_path(path)
_setup_pkgresources()
| nCoda/macOS | .eggs/py2app-0.14-py2.7.egg/py2app/bootstrap/setup_pkgresource.py | Python | gpl-3.0 | 453 |
#!/usr/bin/env python
# sample module
from jira.client import JIRA
def main():
jira = JIRA()
JIRA(options={'server': 'http://localhost:8100'})
projects = jira.projects()
print projects
for project in projects:
print project.key
# Standard boilerplate to call the main() function.
if __name__ == '__main__':
main() | selvait90/jira-automation | sample.py | Python | gpl-3.0 | 349 |
""" Class that contains client access to the transformation DB handler. """
__RCSID__ = "$Id$"
import types
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.Core.Base.Client import Client
from DIRAC.Core.Utilities.List import breakListIntoChunks
from DIRAC.Resources.Catalog.FileCatalogueBase import FileCatalogueBase
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
rpc = None
url = None
class TransformationClient( Client, FileCatalogueBase ):
""" Exposes the functionality available in the DIRAC/TransformationHandler
This inherits the DIRAC base Client for direct execution of server functionality.
The following methods are available (although not visible here).
Transformation (table) manipulation
deleteTransformation(transName)
getTransformationParameters(transName,paramNames)
getTransformationWithStatus(status)
setTransformationParameter(transName,paramName,paramValue)
deleteTransformationParameter(transName,paramName)
TransformationFiles table manipulation
addFilesToTransformation(transName,lfns)
addTaskForTransformation(transName,lfns=[],se='Unknown')
getTransformationStats(transName)
TransformationTasks table manipulation
setTaskStatus(transName, taskID, status)
setTaskStatusAndWmsID(transName, taskID, status, taskWmsID)
getTransformationTaskStats(transName)
deleteTasks(transName, taskMin, taskMax)
extendTransformation( transName, nTasks)
getTasksToSubmit(transName,numTasks,site='')
TransformationLogging table manipulation
getTransformationLogging(transName)
File/directory manipulation methods (the remainder of the interface can be found below)
getFileSummary(lfns)
exists(lfns)
Web monitoring tools
getDistinctAttributeValues(attribute, selectDict)
getTransformationStatusCounters()
getTransformationSummary()
getTransformationSummaryWeb(selectDict, sortList, startItem, maxItems)
"""
def __init__( self, **kwargs ):
Client.__init__( self, **kwargs )
opsH = Operations()
self.maxResetCounter = opsH.getValue( 'Productions/ProductionFilesMaxResetCounter', 10 )
self.setServer( 'Transformation/TransformationManager' )
def setServer( self, url ):
self.serverURL = url
def getCounters( self, table, attrList, condDict, older = None, newer = None, timeStamp = None,
rpc = '', url = '' ):
rpcClient = self._getRPC( rpc = rpc, url = url )
    return rpcClient.getCounters( table, attrList, condDict, older, newer, timeStamp )
def addTransformation( self, transName, description, longDescription, transType, plugin, agentType, fileMask,
transformationGroup = 'General',
groupSize = 1,
inheritedFrom = 0,
body = '',
maxTasks = 0,
eventsPerTask = 0,
addFiles = True,
rpc = '', url = '', timeout = 1800 ):
""" add a new transformation
"""
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
return rpcClient.addTransformation( transName, description, longDescription, transType, plugin,
agentType, fileMask, transformationGroup, groupSize, inheritedFrom,
body, maxTasks, eventsPerTask, addFiles )
def getTransformations( self, condDict = {}, older = None, newer = None, timeStamp = 'CreationDate',
orderAttribute = None, limit = 100, extraParams = False, rpc = '', url = '', timeout = None ):
""" gets all the transformations in the system, incrementally. "limit" here is just used to determine the offset.
"""
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
transformations = []
# getting transformations - incrementally
offsetToApply = 0
while True:
res = rpcClient.getTransformations( condDict, older, newer, timeStamp, orderAttribute, limit,
extraParams, offsetToApply )
if not res['OK']:
return res
else:
gLogger.verbose( "Result for limit %d, offset %d: %d" % ( limit, offsetToApply, len( res['Value'] ) ) )
if res['Value']:
transformations = transformations + res['Value']
offsetToApply += limit
if len( res['Value'] ) < limit:
break
return S_OK( transformations )
def getTransformation( self, transName, extraParams = False, rpc = '', url = '', timeout = None ):
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
return rpcClient.getTransformation( transName, extraParams )
def getTransformationFiles( self, condDict = {}, older = None, newer = None, timeStamp = 'LastUpdate',
orderAttribute = None, limit = 10000, rpc = '', url = '', timeout = 1800 ):
""" gets all the transformation files for a transformation, incrementally.
"limit" here is just used to determine the offset.
"""
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
transformationFiles = []
# getting transformationFiles - incrementally
offsetToApply = 0
while True:
res = rpcClient.getTransformationFiles( condDict, older, newer, timeStamp, orderAttribute, limit, offsetToApply )
if not res['OK']:
return res
else:
gLogger.verbose( "Result for limit %d, offset %d: %d" % ( limit, offsetToApply, len( res['Value'] ) ) )
if res['Value']:
transformationFiles = transformationFiles + res['Value']
offsetToApply += limit
if len( res['Value'] ) < limit:
break
return S_OK( transformationFiles )
def getTransformationTasks( self, condDict = {}, older = None, newer = None, timeStamp = 'CreationTime',
orderAttribute = None, limit = 10000, inputVector = False, rpc = '',
url = '', timeout = None ):
""" gets all the transformation tasks for a transformation, incrementally.
"limit" here is just used to determine the offset.
"""
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
transformationTasks = []
# getting transformationFiles - incrementally
offsetToApply = 0
while True:
res = rpcClient.getTransformationTasks( condDict, older, newer, timeStamp, orderAttribute, limit,
inputVector, offsetToApply )
if not res['OK']:
return res
else:
gLogger.verbose( "Result for limit %d, offset %d: %d" % ( limit, offsetToApply, len( res['Value'] ) ) )
if res['Value']:
transformationTasks = transformationTasks + res['Value']
offsetToApply += limit
if len( res['Value'] ) < limit:
break
return S_OK( transformationTasks )
def cleanTransformation( self, transID, rpc = '', url = '', timeout = None ):
""" Clean the transformation, and set the status parameter (doing it here, for easier extensibility)
"""
# Cleaning
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
res = rpcClient.cleanTransformation( transID )
if not res['OK']:
return res
# Setting the status
return self.setTransformationParameter( transID, 'Status', 'TransformationCleaned' )
def moveFilesToDerivedTransformation( self, transDict, resetUnused = True ):
""" move files input to a transformation, to the derived one
"""
prod = transDict['TransformationID']
parentProd = int( transDict.get( 'InheritedFrom', 0 ) )
movedFiles = {}
if not parentProd:
gLogger.warn( "[None] [%d] .moveFilesToDerivedTransformation: Transformation was not derived..." % prod )
return S_OK( ( parentProd, movedFiles ) )
# get the lfns in status Unused/MaxReset of the parent production
res = self.getTransformationFiles( condDict = {'TransformationID': parentProd, 'Status': [ 'Unused', 'MaxReset' ]} )
if not res['OK']:
gLogger.error( "[None] [%d] .moveFilesToDerivedTransformation: Error getting Unused files from transformation %s:" % ( prod, parentProd ), res['Message'] )
return res
parentFiles = res['Value']
lfns = [lfnDict['LFN'] for lfnDict in parentFiles]
if not lfns:
gLogger.info( "[None] [%d] .moveFilesToDerivedTransformation: No files found to be moved from transformation %d" % ( prod, parentProd ) )
return S_OK( ( parentProd, movedFiles ) )
# get the lfns of the derived production that were Unused/MaxReset in the parent one
res = self.getTransformationFiles( condDict = { 'TransformationID': prod, 'LFN': lfns} )
if not res['OK']:
gLogger.error( "[None] [%d] .moveFilesToDerivedTransformation: Error getting files from derived transformation" % prod, res['Message'] )
return res
derivedFiles = res['Value']
suffix = '-%d' % parentProd
derivedStatusDict = dict( [( derivedDict['LFN'], derivedDict['Status'] ) for derivedDict in derivedFiles] )
newStatusFiles = {}
parentStatusFiles = {}
force = False
for parentDict in parentFiles:
lfn = parentDict['LFN']
derivedStatus = derivedStatusDict.get( lfn )
if derivedStatus:
parentStatus = parentDict['Status']
if resetUnused and parentStatus == 'MaxReset':
status = 'Unused'
moveStatus = 'Unused from MaxReset'
force = True
else:
status = parentStatus
moveStatus = parentStatus
if derivedStatus.endswith( suffix ):
# This file is Unused or MaxReset while it was most likely Assigned at the time of derivation
parentStatusFiles.setdefault( 'Moved-%s' % str( prod ), [] ).append( lfn )
newStatusFiles.setdefault( ( status, parentStatus ), [] ).append( lfn )
movedFiles[moveStatus] = movedFiles.setdefault( moveStatus, 0 ) + 1
elif parentDict['Status'] == 'Unused':
# If the file was Unused already at derivation time, set it NotProcessed
parentStatusFiles.setdefault( 'NotProcessed', [] ).append( lfn )
# Set the status in the parent transformation first
for status, lfnList in parentStatusFiles.items():
for lfnChunk in breakListIntoChunks( lfnList, 5000 ):
res = self.setFileStatusForTransformation( parentProd, status, lfnChunk )
if not res['OK']:
gLogger.error( "[None] [%d] .moveFilesToDerivedTransformation: Error setting status %s for %d files in transformation %d "
% ( prod, status, len( lfnList ), parentProd ),
res['Message'] )
# Set the status in the new transformation
for ( status, oldStatus ), lfnList in newStatusFiles.items():
for lfnChunk in breakListIntoChunks( lfnList, 5000 ):
res = self.setFileStatusForTransformation( prod, status, lfnChunk, force = force )
if not res['OK']:
gLogger.error( "[None] [%d] .moveFilesToDerivedTransformation: Error setting status %s for %d files; resetting them %s in transformation %d"
% ( prod, status, len( lfnChunk ), oldStatus, parentProd ),
res['Message'] )
res = self.setFileStatusForTransformation( parentProd, oldStatus, lfnChunk )
if not res['OK']:
gLogger.error( "[None] [%d] .moveFilesToDerivedTransformation: Error setting status %s for %d files in transformation %d"
% ( prod, oldStatus, len( lfnChunk ), parentProd ),
res['Message'] )
return S_OK( ( parentProd, movedFiles ) )
def setFileStatusForTransformation( self, transName, newLFNsStatus = {}, lfns = [], force = False,
rpc = '', url = '', timeout = 120 ):
""" sets the file status for LFNs of a transformation
For backward compatibility purposes, the status and LFNs can be passed in 2 ways:
- newLFNsStatus is a dictionary with the form:
{'/this/is/an/lfn1.txt': 'StatusA', '/this/is/an/lfn2.txt': 'StatusB', ... }
and at this point lfns is not considered
- newLFNStatus is a string, that applies to all the LFNs in lfns
"""
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
# create dictionary in case newLFNsStatus is a string
if type( newLFNsStatus ) == type( '' ):
newLFNsStatus = dict( [( lfn, newLFNsStatus ) for lfn in lfns ] )
# gets status as of today
tsFiles = self.getTransformationFiles( {'TransformationID':transName, 'LFN': newLFNsStatus.keys()} )
if not tsFiles['OK']:
return tsFiles
tsFiles = tsFiles['Value']
if tsFiles:
# for convenience, makes a small dictionary out of the tsFiles, with the lfn as key
tsFilesAsDict = {}
for tsFile in tsFiles:
tsFilesAsDict[tsFile['LFN']] = [tsFile['Status'], tsFile['ErrorCount'], tsFile['FileID']]
# applying the state machine to the proposed status
newStatuses = self._applyTransformationFilesStateMachine( tsFilesAsDict, newLFNsStatus, force )
if newStatuses: # if there's something to update
# must do it for the file IDs...
newStatusForFileIDs = dict( [( tsFilesAsDict[lfn][2], newStatuses[lfn] ) for lfn in newStatuses.keys()] )
res = rpcClient.setFileStatusForTransformation( transName, newStatusForFileIDs )
if not res['OK']:
return res
return S_OK( newStatuses )
def _applyTransformationFilesStateMachine( self, tsFilesAsDict, dictOfProposedLFNsStatus, force ):
""" For easier extension, here we apply the state machine of the production files.
VOs might want to replace the standard here with something they prefer.
tsFiles is a dictionary with the lfn as key and as value a list of [Status, ErrorCount, FileID]
dictOfNewLFNsStatus is a dictionary with the proposed status
force is a boolean
It returns a dictionary with the status updates
"""
newStatuses = {}
for lfn in dictOfProposedLFNsStatus.keys():
if lfn not in tsFilesAsDict.keys():
continue
else:
newStatus = dictOfProposedLFNsStatus[lfn]
# Apply optional corrections
if tsFilesAsDict[lfn][0].lower() == 'processed' and dictOfProposedLFNsStatus[lfn].lower() != 'processed':
if not force:
newStatus = 'Processed'
elif tsFilesAsDict[lfn][0].lower() == 'maxreset':
if not force:
newStatus = 'MaxReset'
elif dictOfProposedLFNsStatus[lfn].lower() == 'unused':
errorCount = tsFilesAsDict[lfn][1]
# every 10 retries (by default)
if errorCount and ( ( errorCount % self.maxResetCounter ) == 0 ):
if not force:
newStatus = 'MaxReset'
if tsFilesAsDict[lfn][0].lower() != newStatus:
newStatuses[lfn] = newStatus
return newStatuses
def setTransformationParameter( self, transID, paramName, paramValue, force = False,
rpc = '', url = '', timeout = 120 ):
""" Sets a transformation parameter. There's a special case when coming to setting the status of a transformation.
"""
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
if paramName.lower() == 'status':
# get transformation Type
transformation = self.getTransformation( transID )
if not transformation['OK']:
return transformation
transformationType = transformation['Value']['Type']
# get status as of today
originalStatus = self.getTransformationParameters( transID, 'Status' )
if not originalStatus['OK']:
return originalStatus
originalStatus = originalStatus['Value']
transIDAsDict = {transID: [originalStatus, transformationType]}
dictOfProposedstatus = {transID: paramValue}
# applying the state machine to the proposed status
value = self._applyTransformationStatusStateMachine( transIDAsDict, dictOfProposedstatus, force )
else:
value = paramValue
return rpcClient.setTransformationParameter( transID, paramName, value )
def _applyTransformationStatusStateMachine( self, transIDAsDict, dictOfProposedstatus, force ):
""" For easier extension, here we apply the state machine of the transformation status.
VOs might want to replace the standard here with something they prefer.
transIDAsDict is a dictionary with the transID as key and as value a list with [Status, Type]
dictOfProposedstatus is a dictionary with the proposed status
force is a boolean
It returns the new status (the standard is just doing nothing: everything is possible)
"""
return dictOfProposedstatus.values()[0]
#####################################################################
#
# These are the file catalog interface methods
#
def isOK( self ):
return self.valid
def getName( self, DN = '' ):
""" Get the file catalog type name
"""
return self.name
def addDirectory( self, path, force = False, rpc = '', url = '', timeout = None ):
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
return rpcClient.addDirectory( path, force )
def getReplicas( self, lfn, rpc = '', url = '', timeout = None ):
res = self.__checkArgumentFormat( lfn )
if not res['OK']:
return res
lfns = res['Value'].keys()
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
return rpcClient.getReplicas( lfns )
def addFile( self, lfn, force = False, rpc = '', url = '', timeout = None ):
res = self.__checkArgumentFormat( lfn )
if not res['OK']:
return res
lfndicts = res['Value']
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
return rpcClient.addFile( lfndicts, force )
def addReplica( self, lfn, force = False, rpc = '', url = '', timeout = None ):
res = self.__checkArgumentFormat( lfn )
if not res['OK']:
return res
lfndicts = res['Value']
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
return rpcClient.addReplica( lfndicts, force )
def removeFile( self, lfn, rpc = '', url = '', timeout = None ):
res = self.__checkArgumentFormat( lfn )
if not res['OK']:
return res
lfns = res['Value'].keys()
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
successful = {}
failed = {}
listOfLists = breakListIntoChunks( lfns, 100 )
for fList in listOfLists:
res = rpcClient.removeFile( fList )
if not res['OK']:
return res
successful.update( res['Value']['Successful'] )
failed.update( res['Value']['Failed'] )
resDict = {'Successful': successful, 'Failed':failed}
return S_OK( resDict )
def removeReplica( self, lfn, rpc = '', url = '', timeout = None ):
res = self.__checkArgumentFormat( lfn )
if not res['OK']:
return res
lfndicts = res['Value']
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
successful = {}
failed = {}
# as lfndicts is a dict, the breakListIntoChunks will fail. Fake it!
    listOfDicts = []
    localdicts = {}
    for lfn, info in lfndicts.items():
      localdicts.update( { lfn : info } )
      if len( localdicts.keys() ) % 100 == 0:
        listOfDicts.append( localdicts )
        localdicts = {}
    if localdicts: # keep the final chunk of fewer than 100 replicas
      listOfDicts.append( localdicts )
for fDict in listOfDicts:
res = rpcClient.removeReplica( fDict )
if not res['OK']:
return res
successful.update( res['Value']['Successful'] )
failed.update( res['Value']['Failed'] )
resDict = {'Successful': successful, 'Failed':failed}
return S_OK( resDict )
def getReplicaStatus( self, lfn, rpc = '', url = '', timeout = None ):
res = self.__checkArgumentFormat( lfn )
if not res['OK']:
return res
lfndict = res['Value']
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
return rpcClient.getReplicaStatus( lfndict )
def setReplicaStatus( self, lfn, rpc = '', url = '', timeout = None ):
res = self.__checkArgumentFormat( lfn )
if not res['OK']:
return res
lfndict = res['Value']
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
return rpcClient.setReplicaStatus( lfndict )
def setReplicaHost( self, lfn, rpc = '', url = '', timeout = None ):
res = self.__checkArgumentFormat( lfn )
if not res['OK']:
return res
lfndict = res['Value']
rpcClient = self._getRPC( rpc = rpc, url = url, timeout = timeout )
return rpcClient.setReplicaHost( lfndict )
def removeDirectory( self, lfn, rpc = '', url = '', timeout = None ):
return self.__returnOK( lfn )
def createDirectory( self, lfn, rpc = '', url = '', timeout = None ):
return self.__returnOK( lfn )
def createLink( self, lfn, rpc = '', url = '', timeout = None ):
return self.__returnOK( lfn )
def removeLink( self, lfn, rpc = '', url = '', timeout = None ):
return self.__returnOK( lfn )
def __returnOK( self, lfn ):
res = self.__checkArgumentFormat( lfn )
if not res['OK']:
return res
successful = {}
for lfn in res['Value'].keys():
successful[lfn] = True
resDict = {'Successful':successful, 'Failed':{}}
return S_OK( resDict )
def __checkArgumentFormat( self, path ):
if type( path ) in types.StringTypes:
urls = {path:False}
elif type( path ) == types.ListType:
urls = {}
for url in path:
urls[url] = False
elif type( path ) == types.DictType:
urls = path
else:
return S_ERROR( "TransformationClient.__checkArgumentFormat: Supplied path is not of the correct format." )
return S_OK( urls )
| avedaee/DIRAC | TransformationSystem/Client/TransformationClient.py | Python | gpl-3.0 | 22,189 |
from django.db import models
from django.contrib.auth.models import User
import MySQLdb
# Create your models here.
class Comentario(models.Model):
"""Comentario"""
contenido = models.TextField(help_text='Escribe un comentario')
fecha_coment = models.DateField(auto_now=True)
def __unicode__(self):
return self.contenido
class Estado(models.Model):
"""Estado"""
nom_estado = models.CharField(max_length=50)
    def __unicode__(self):
        return self.nom_estado
class Categoria(models.Model):
"""Categoria"""
nombre = models.CharField(max_length=50)
descripcion = models.TextField(help_text='Escribe una descripcion de la categoria')
class Entrada(models.Model):
"""Entrada"""
autor = models.ForeignKey(User)
comentario = models.ForeignKey(Comentario)
estado = models.ForeignKey(Estado)
titulo = models.CharField(max_length=100)
contenido = models.TextField(help_text='Redacta el contenido')
fecha_pub = models.DateField(auto_now=True)
def __unicode__(self):
return self.titulo
class Agregador(models.Model):
"""agreador"""
entrada = models.ForeignKey(Entrada)
categoria = models.ManyToManyField(Categoria) | darciga/cf | blog/models.py | Python | gpl-3.0 | 1,127 |
import unittest
from itertools import izip
import numpy as np
from numpy import cos, sin, pi
from pele.angleaxis import RBTopology, RigidFragment, RBPotentialWrapper
from pele.potentials import LJ
from pele.angleaxis._otp_cluster import OTPCluster
from pele.thermodynamics import get_thermodynamic_information
from pele.utils import rotations
from pele.angleaxis._aa_utils import _rot_mat_derivative, _sitedist_grad, _sitedist
from pele.angleaxis.aamindist import MeasureRigidBodyCluster
_x03 = np.array([2.550757898788, 2.591553038507, 3.696836364193,
2.623281513163, 3.415794212648, 3.310786279789,
1.791383852327, 2.264321752809, 4.306217333671,
0.761945654023, -0.805817782109, 1.166981882601,
0.442065301864, -2.747066418223, -1.784325262714,
-1.520905562598, 0.403670860200, -0.729768985400])
_x03_atomistic = np.array([3.064051819556, 2.474533745459, 3.646107658946,
2.412011983074, 2.941152759499, 4.243695098053,
2.176209893734, 2.358972610563, 3.200706335581,
2.786627589565, 3.211876105193, 2.850924310983,
1.962626909252, 3.436918873216, 3.370903763850,
3.120590040673, 3.598587659535, 3.710530764535,
1.697360211099, 2.317229950712, 4.823998989452,
2.283487958310, 1.840698306602, 4.168734267290,
1.393303387573, 2.635037001113, 3.925918744272
])
class TestOTPExplicit(unittest.TestCase):
def make_otp(self):
"""this constructs a single OTP molecule"""
otp = RigidFragment()
otp.add_atom("O", np.array([0.0, -2./3 * np.sin( 7.*pi/24.), 0.0]), 1.)
otp.add_atom("O", np.array([cos( 7.*pi/24.), 1./3. * sin( 7.* pi/24.), 0.0]), 1.)
otp.add_atom("O", np.array([-cos( 7.* pi/24.), 1./3. * sin( 7.*pi/24), 0.0]), 1.)
otp.finalize_setup()
return otp
def setUp(self):
nrigid = 3
self.topology = RBTopology()
self.topology.add_sites([self.make_otp() for i in xrange(nrigid)])
self.topology.finalize_setup()
cartesian_potential = LJ()
self.pot = RBPotentialWrapper(self.topology, cartesian_potential)
self.x0 = _x03
self.x0 = np.array(self.x0)
self.e0 = -17.3387670023
assert nrigid * 6 == self.x0.size
self.x0atomistic = _x03_atomistic
self.nrigid = nrigid
def test_energy(self):
e = self.pot.getEnergy(self.x0)
self.assertAlmostEqual(e, self.e0, delta=1e-4)
def test_energy_gradient(self):
e = self.pot.getEnergy(self.x0)
gnum = self.pot.NumericalDerivative(self.x0)
e2, g = self.pot.getEnergyGradient(self.x0)
self.assertAlmostEqual(e, e2, delta=1e-4)
for i in xrange(g.size):
self.assertAlmostEqual(g[i], gnum[i], 2)
def test_to_atomistic(self):
xatom = self.topology.to_atomistic(self.x0).flatten()
for i in xrange(xatom.size):
self.assertAlmostEqual(xatom[i], self.x0atomistic[i], 2)
def test_site_to_atomistic(self):
rf = self.make_otp()
p = np.array([1., 2, 3])
p /= np.linalg.norm(p)
com = np.array([4., 5, 6])
print "otp to atomistic"
print rf.to_atomistic(com, p)
print "otp transform grad"
g = np.array(range(9), dtype=float).reshape([-1,3])
print g.reshape(-1)
print rf.transform_grad(p, g)
def test_to_atomistic2(self):
x0 = np.array(range(self.nrigid * 6), dtype=float)
x2 = x0.reshape([-1,3])
for p in x2[self.nrigid:,:]:
p /= np.linalg.norm(p)
atomistic = self.topology.to_atomistic(x0).flatten()
from pele.potentials import LJ
lj = LJ()
e, g = lj.getEnergyGradient(atomistic.reshape(-1))
grb = self.topology.transform_gradient(x0, g)
rbpot = RBPotentialWrapper(self.topology, lj)
print rbpot.getEnergy(x0)
class TestCppRBPotentialWrapper(TestOTPExplicit):
def test_pot_wrapper(self):
from pele.angleaxis import _cpp_aa
from pele.potentials import LJ
rbpot_cpp = _cpp_aa.RBPotentialWrapper(self.topology, LJ())
rbpot = RBPotentialWrapper(self.topology, LJ())
self.assertAlmostEqual(rbpot_cpp.getEnergy(self.x0),
rbpot.getEnergy(self.x0), 4)
e1, grad1 = rbpot_cpp.getEnergyGradient(self.x0)
e2, grad2 = rbpot.getEnergyGradient(self.x0)
self.assertAlmostEqual(e1, e2, 4)
for g1, g2 in zip(grad1, grad2):
self.assertAlmostEqual(g1, g2, 3)
# print "energy cpp"
# print e1, e2
# print grad1
# print grad2
_x1 = np.array([ 1.9025655 , 0.39575842, 2.70994994, 1.12711741, 0.63413933,
1.99433564, 1.86553644, 1.71434811, 2.22927686, 0.80189315,
1.19513512, 3.02357997, 1.25845172, -0.06244027, 1.27217385,
-2.26564485, 0.25537024, 0.66231258, -1.49510664, 0.94428774,
-0.04120075, -0.87664883, -0.21441754, 2.05796547])
_x2 = np.array([ 2.01932983, 0.32928065, 2.34949584, 1.12261277, 0.84195098,
2.08827517, 1.42644916, 1.83608794, 2.23147536, 1.12872074,
0.93206141, 3.28789605, 1.73243138, -0.1199651 , 1.02925229,
-1.64603729, 0.30701482, 0.90204992, -1.96259809, 0.06557119,
0.11010908, -0.37462588, -0.42374544, 1.97728056])
class TestOTPCluster(unittest.TestCase):
def setUp(self):
np.random.seed(0)
self.nmol = 4
self.system = OTPCluster(self.nmol)
pot = self.system.get_potential()
self.db = self.system.create_database()
self.m1 = self.db.addMinimum(pot.getEnergy(_x1), _x1)
self.m2 = self.db.addMinimum(pot.getEnergy(_x2), _x2)
def test1(self):
pot = self.system.get_potential()
self.assertLess(np.linalg.norm(pot.getGradient(self.m1.coords)), .1)
self.assertLess(np.linalg.norm(pot.getGradient(self.m2.coords)), .1)
def test_basinhopping(self):
db = self.system.create_database()
bh = self.system.get_basinhopping(db)
bh.setPrinting(ostream=None)
bh.run(5)
self.assertGreaterEqual(db.number_of_minima(), 1)
def test_double_ended_connect(self):
connect = self.system.get_double_ended_connect(self.m1, self.m2, self.db)
connect.connect()
self.assertTrue(connect.success())
path = connect.returnPath()
def test_thermodynamics(self):
get_thermodynamic_information(self.system, self.db, nproc=None, recalculate=True)
self.assertIsNotNone(self.m1.fvib)
mt = self.system.get_metric_tensor(self.m1.coords)
print "metric tensor"
print mt
class TestRBTopologyOTP(unittest.TestCase):
def setUp(self):
np.random.seed(0)
self.nmol = 3
self.system = OTPCluster(self.nmol)
# pot = self.system.get_potential()
# self.db = self.system.create_database()
# self.m1 = self.db.addMinimum(pot.getEnergy(_x1), _x1)
# self.m2 = self.db.addMinimum(pot.getEnergy(_x2), _x2)
self.x0 = np.array([ 0, 1, 2, 3, 4, 5, 6, 7, 8,
0.517892, 0.575435, 0.632979,
0.531891, 0.576215, 0.620539,
0.540562, 0.5766, 0.612637 ])
from pele.angleaxis.aamindist import TransformAngleAxisCluster
self.topology = self.system.aatopology
self.transform = TransformAngleAxisCluster(self.topology)
self.p0 = np.array(range(1,4), dtype=float)
self.p0 /= np.linalg.norm(self.p0)
def test_transform_rotate(self):
print "\ntest rotate"
x = self.x0.copy()
p = np.array(range(1,4), dtype=float)
p /= np.linalg.norm(p)
self.transform.rotate(x, rotations.aa2mx(p))
xnewtrue = np.array([ 0.48757698, 0.61588594, 2.09355038, 2.02484605, 4.76822812,
4.81289924, 3.56211511, 8.92057031, 7.53224809, 0.71469473,
1.23875927, 1.36136748, 0.72426504, 1.24674367, 1.34426835,
0.73015833, 1.25159032, 1.33345003])
for v1, v2 in izip(x, xnewtrue):
self.assertAlmostEqual(v1, v2, 5)
def test_align_path(self):
print "\ntest align_path"
x1 = self.x0.copy()
x2 = self.x0 + 5
self.topology.align_path([x1, x2])
x2true = np.array([ 5. , 6. , 7. , 8. ,
9. , 10. , 11. , 12. ,
13. , 1.92786071, 1.94796529, 1.96807021,
1.93320298, 1.94869267, 1.96418236, 1.93645608,
1.94905155, 1.96164668])
for v1, v2 in izip(x1, self.x0):
self.assertAlmostEqual(v1, v2, 5)
for v1, v2 in izip(x2, x2true):
self.assertAlmostEqual(v1, v2, 5)
def test_cpp_zero_ev(self):
print "\ntest zeroEV cpp"
x = self.x0.copy()
zev = self.topology._zeroEV_python(x)
czev = self.topology.cpp_topology.get_zero_modes(x)
self.assertEqual(len(czev), 6)
for ev, cev in izip(zev, czev):
for v1, v2 in izip(ev, cev):
self.assertAlmostEqual(v1, v2, 5)
def test_site_distance_squared(self):
print "\ntest site distance squared"
c0 = np.zeros(3)
c1 = np.ones(3)
p0 = self.p0.copy()
p1 = p0 + 1
site = self.system.make_otp()
d2 = site.distance_squared(c0, p0, c1, p1)
d2p = _sitedist(c1-c0, p0, p1, site.S, site.W, site.cog)
self.assertAlmostEqual(d2, 10.9548367929, 5)
def test_distance_squared(self):
print "\ntest distance squared"
x1 = self.x0.copy()
x2 = self.x0 + 1.1
d2 = self.topology.distance_squared(x1, x2)
d3 = self.topology._distance_squared_python(x1, x2)
self.assertAlmostEqual(d2, 38.9401810973, 5)
self.assertAlmostEqual(d2, d3, 5)
def test_distance_squared_grad(self):
print "\ntest distance squared grad"
x1 = self.x0.copy()
x2 = self.x0 + 1.1
grad = self.topology.distance_squared_grad(x1, x2)
g2 = self.topology._distance_squared_grad_python(x1, x2)
gtrue = np.array([-6.6 , -6.6 , -6.6 , -6.6 , -6.6 ,
-6.6 , -6.6 , -6.6 , -6.6 , -1.21579025,
-0.07013805, -1.2988823 , -1.21331786, -0.06984532, -1.28945301,
-1.2116105 , -0.06975828, -1.28362943])
for v1, v2 in izip(grad, gtrue):
self.assertAlmostEqual(v1, v2, 5)
for v1, v2 in izip(grad, g2):
self.assertAlmostEqual(v1, v2, 5)
def test_measure_align(self):
print "\ntest measure align"
x1 = self.x0.copy()
x2 = self.x0 + 5.1
x2[-1] = x1[-1] + .1
x20 = x2.copy()
measure = MeasureRigidBodyCluster(self.topology)
measure.align(x1, x2)
if __name__ == "__main__":
unittest.main()
| cjforman/pele | pele/angleaxis/tests/test_otp.py | Python | gpl-3.0 | 11,705 |
# -*- coding: utf8 -*-
SQL = """select SQL_CALC_FOUND_ROWS * FROM doc_view order by `name` asc limit %(offset)d,%(limit)d ;"""
FOUND_ROWS = True
ROOT = "doc_view_list"
ROOT_PREFIX = "<doc_view_edit />"
ROOT_POSTFIX= None
XSL_TEMPLATE = "data/af-web.xsl"
EVENT = None
WHERE = ()
PARAM = None
TITLE="Список видов документов"
MESSAGE="ошибка получения списка видов документов"
ORDER = None
| ffsdmad/af-web | cgi-bin/plugins2/doc_view_list.py | Python | gpl-3.0 | 444 |
# -*- encoding: utf-8 -*-
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2014 Savoir-faire Linux
# (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
from openerp.osv import osv, fields
_logger = logging.getLogger(__name__)
class res_users(osv.osv):
_inherit = "res.users"
_columns = {
'xis_user_external_id': fields.integer('XIS external user',
required=True),
}
| Xprima-ERP/odoo_addons | xpr_xis_connector/res_users.py | Python | gpl-3.0 | 1,160 |
"""
Contains format specification class and methods to parse it from JSON.
.. codeauthor:: Tomas Krizek <tomas.krizek1@tul.cz>
"""
import json
import re
def get_root_input_type_from_json(data):
"""Return the root input type from JSON formatted string."""
return parse_format(json.loads(data))
def parse_format(data):
"""Returns root input type from data."""
input_types = {}
data = data['ist_nodes']
root_id = data[0]['id'] # set root type
for item in data:
input_type = _get_input_type(item)
if input_type is not None:
input_types[input_type['id']] = input_type # register by id
_substitute_ids_with_references(input_types)
return input_types[root_id]
SCALAR = ['Integer', 'Double', 'Bool', 'String', 'Selection', 'FileName']
def is_scalar(input_type):
"""Returns True if input_type is scalar."""
return input_type['base_type'] in SCALAR
RE_PARAM = re.compile('^<([a-zA-Z][a-zA-Z0-9_]*)>$')
def is_param(value):
"""Determine whether given value is a parameter string."""
if not isinstance(value, str):
return False
return RE_PARAM.match(value)
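# Illustration of the accepted parameter syntax (follows from RE_PARAM):
# is_param("<width>") matches, while is_param("width"), is_param("<1w>")
# and is_param(42) do not.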
def _substitute_ids_with_references(input_types):
"""Replaces ids or type names with python object references."""
input_type = {}
def _substitute_implementations():
"""Replaces implementation ids with input_types."""
impls = {}
for id_ in input_type['implementations']:
type_ = input_types[id_]
impls[type_['name']] = type_
input_type['implementations'] = impls
def _substitute_default_descendant():
"""Replaces default descendant id with input_type."""
id_ = input_type.get('default_descendant', None)
if id_ is not None:
input_type['default_descendant'] = input_types[id_]
def _substitute_key_type():
"""Replaces key type with input_type."""
# pylint: disable=unused-variable, invalid-name
for __, value in input_type['keys'].items():
value['type'] = input_types[value['type']]
# pylint: disable=unused-variable, invalid-name
for __, input_type in input_types.items():
if input_type['base_type'] == 'Array':
input_type['subtype'] = input_types[input_type['subtype']]
elif input_type['base_type'] == 'Abstract':
_substitute_implementations()
_substitute_default_descendant()
elif input_type['base_type'] == 'Record':
_substitute_key_type()
def _get_input_type(data):
"""Returns the input_type data structure that defines an input type
and its constraints for validation."""
if 'id' not in data or 'input_type' not in data:
return None
input_type = dict(
id=data['id'],
base_type=data['input_type']
)
input_type['name'] = data.get('name', '')
input_type['full_name'] = data.get('full_name', '')
input_type['description'] = data.get('description', '')
input_type['attributes'] = data.get('attributes', {})
if input_type['base_type'] in ['Double', 'Integer']:
input_type.update(_parse_range(data))
elif input_type['base_type'] == 'Array':
input_type.update(_parse_range(data))
if input_type['min'] < 0:
input_type['min'] = 0
input_type['subtype'] = data['subtype']
elif input_type['base_type'] == 'FileName':
input_type['file_mode'] = data['file_mode']
elif input_type['base_type'] == 'Selection':
input_type['values'] = _list_to_dict(data['values'], 'name')
elif input_type['base_type'] == 'Record':
input_type['keys'] = _list_to_dict(data['keys'])
input_type['implements'] = data.get('implements', [])
input_type['reducible_to_key'] = data.get('reducible_to_key', None)
elif input_type['base_type'] == 'Abstract':
input_type['implementations'] = data['implementations']
input_type['default_descendant'] = data.get('default_descendant', None)
return input_type
def _parse_range(data):
"""Parses the format range properties - min, max."""
input_type = {}
try:
input_type['min'] = data['range'][0]
except (KeyError, TypeError): # set default value
input_type['min'] = float('-inf')
try:
input_type['max'] = data['range'][1]
except (KeyError, TypeError): # set default value
input_type['max'] = float('inf')
return input_type
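# Example (hypothetical data): {'range': [0, 10]} yields min=0, max=10;
# a missing or malformed 'range' falls back to -inf/+inf.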
def _list_to_dict(list_, key_label='key'):
"""
Transforms a list of dictionaries into a dictionary of dictionaries.
Original dictionaries are assigned key specified in each of them
by key_label.
"""
dict_ = {}
for item in list_:
dict_[item[key_label]] = item
return dict_
| GeoMop/GeoMop | src/gm_base/model_data/format.py | Python | gpl-3.0 | 4,784 |
"""
Page view class
"""
import os
from Server.Importer import ImportFromModule
class PageView(ImportFromModule("Server.PageViewBase", "PageViewBase")):
"""
Page view class.
"""
_PAGE_TITLE = "Python Web Framework"
def __init__(self, htmlToLoad):
"""
Constructor.
- htmlToLoad : HTML to load
"""
self.SetPageTitle(self._PAGE_TITLE)
self.AddMetaData("charset=\"UTF-8\"")
self.AddMetaData("name=\"viewport\" content=\"width=device-width, initial-scale=1\"")
self.AddStyleSheet("/css/styles.css")
self.AddJavaScript("/js/http.js")
self.LoadHtml(os.path.join(os.path.dirname(__file__), "%s.html" % htmlToLoad))
self.SetPageData({ "PageTitle" : self._PAGE_TITLE })
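# Hedged usage sketch (assuming the framework instantiates views directly):
#     view = PageView("Home")  # loads Home.html from this module's directory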
| allembedded/python_web_framework | WebApplication/Views/PageView.py | Python | gpl-3.0 | 781 |
import discord
import asyncio
import datetime
import time
import aiohttp
import threading
import glob
import re
import json
import os
import urllib.request
from discord.ext import commands
from random import randint
from random import choice as randchoice
from random import choice as rndchoice
from random import shuffle
from .utils.dataIO import fileIO
from .utils import checks
from bs4 import BeautifulSoup
class Runescapecompare:
"""Runescape-relate commands"""
def __init__(self, bot):
self.bot = bot
"""
imLink = http://services.runescape.com/m=hiscore_ironman/index_lite.ws?player=
nmLink = http://services.runescape.com/m=hiscore/index_lite.ws?player=
"""
@commands.group(name="compare", pass_context=True)
async def _compare(self, ctx):
if ctx.invoked_subcommand is None:
await self.bot.say("Please, choose a skill to compare!")
#####Overall#####
@_compare.command(name="overall", pass_context=True)
async def compare_overall(self, ctx, name1 : str, name2 : str):
address1 = "http://services.runescape.com/m=hiscore_ironman/index_lite.ws?player=" + name1
address2 = "http://services.runescape.com/m=hiscore_ironman/index_lite.ws?player=" + name2
try:
website1 = urllib.request.urlopen(address1)
website2 = urllib.request.urlopen(address2)
website_html1 = website1.read().decode(website1.headers.get_content_charset())
website_html2 = website2.read().decode(website2.headers.get_content_charset())
stats1 = website_html1.split("\n")
stats2 = website_html2.split("\n")
            stat1 = stats1[0].split(",")
            stat2 = stats2[0].split(",")
            # hiscore fields are returned as strings: rank, level, experience
            rank1, lvl1, xp1 = int(stat1[0]), int(stat1[1]), int(stat1[2])
            rank2, lvl2, xp2 = int(stat2[0]), int(stat2[1]), int(stat2[2])
            if xp1 > xp2:
                comparerank = rank2 - rank1
                comparelvl = lvl1 - lvl2
                comparexp = xp1 - xp2
                await self.bot.say("```" + name1 + "'s ranking is " + str(comparerank) + " ranks higher than " + name2 + "'s rank.\n" + name1 + "'s level is " + str(comparelvl) + " levels higher than " + name2 + "'s.\n" + name1 + "'s total experience is " + str(comparexp) + " higher than " + name2 + "'s.```")
            elif xp2 > xp1:
                comparerank = rank1 - rank2
                comparelvl = lvl2 - lvl1
                comparexp = xp2 - xp1
                await self.bot.say("```" + name2 + "'s ranking is " + str(comparerank) + " ranks higher than " + name1 + "'s rank.\n" + name2 + "'s level is " + str(comparelvl) + " levels higher than " + name1 + "'s.\n" + name2 + "'s total experience is " + str(comparexp) + " higher than " + name1 + "'s.```")
except:
await self.bot.say("Sorry... Something went wrong there. Did you type the name correctly?")
def setup(bot):
n = Runescapecompare(bot)
bot.add_cog(n)
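# Hedged usage note: once loaded, this registers e.g.
# [p]compare overall <name1> <name2> (the prefix depends on the bot's config).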
| IODisrupt/OmegaBot | cogs/runescapecompare.py | Python | gpl-3.0 | 2,978 |
import string
import ast
from state_machine import PSM, Source
class SpecialPattern:
individual_chars = ('t', 'n', 'v', 'f', 'r', '0')
range_chars = ('d', 'D', 'w', 'W', 's', 'S')
    special_chars = ('^', '$', '[', ']', '(', ')', '{', '}', '\\', '.', '*',
                     '?', '+', '|')
restrict_special_chars = ('\\', '[', ']')
posix_classes = ("alnum", "alpha", "blank", "cntrl", "digit", "graph",
"lower", "print", "punct", "space", "upper", "xdigit",
"d", "w", "s")
min_len_posix_class = 1
#-------------------------------------
# Group
class WrappedGroup:
def __init__(self):
self.group = ast.Group()
self.is_alt = False
def add(self, other):
if self.is_alt:
last_alt = self.alt.parts[-1] + (other,)
self.alt.parts = self.alt.parts[:-1] + (last_alt,)
else:
self.group.seq = self.group.seq + (other,)
@property
def alt(self) -> ast.Alternative:
assert self.is_alt
return self.group.seq[0]
def collapse_alt(self):
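        # First "|" seen: wrap everything parsed so far as the first branch of
        # an ast.Alternative; later "|"s just open a new empty branch.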
if self.is_alt:
self.alt.parts = self.alt.parts + ((),)
else:
self.is_alt = True
first_alt_elems = self.group.seq
self.group.seq = (ast.Alternative(),)
self.alt.parts = (first_alt_elems,())
class OpeningOfGroup:
def __init__(self, parent: None, initial: bool=False):
self.is_initial = initial
self.parent = parent # OpeningOfGroup or ContentOfGroup
self.g = WrappedGroup()
self.content_of_initial = None
# forward of function
self.add = self.g.add
# if this group is the initial, their is no parent but we must refer
# to itself as the returning state
# but if it is a nested group, it must be added into its global group
if self.is_initial:
self.content_of_initial = ContentOfGroup(self, initial)
else:
self.parent.add(self.g.group)
def next(self, psm: PSM):
if not self.is_initial and psm.char == "?":
return FirstOptionOfGroup(self)
elif psm.char == ")":
if self.is_initial:
psm.error = 'unexpected ")"'
else:
return self.parent
elif psm.char == "(":
return OpeningOfGroup(self)
elif self.is_initial:
return self.content_of_initial.next(psm)
else:
t = ContentOfGroup(self)
return t.next(psm)
class FirstOptionOfGroup:
def __init__(self, parent: OpeningOfGroup):
self.parent = parent
def next(self, psm: PSM):
if psm.char == ":":
self.parent.g.group.ignored = True
return ContentOfGroup(self.parent)
elif psm.char == "!":
self.parent.g.group.lookhead = ast.Group.NegativeLookhead
return ContentOfGroup(self.parent)
elif psm.char == "=":
self.parent.g.group.lookhead = ast.Group.PositiveLookhead
return ContentOfGroup(self.parent)
elif psm.char == "<":
self.parent.g.group.name = ""
return NameOfGroup(self.parent)
else:
psm.error = 'expected ":", "!", "<" or "="'
class NameOfGroup:
def __init__(self, parent: OpeningOfGroup):
self.parent = parent
def next(self, psm: PSM):
if psm.char.isalpha() or psm.char == "_":
self.parent.g.group.name += psm.char
return self
elif psm.char == ">":
return self.parent
else:
psm.error = 'expected a letter, "_" or ">"'
class ContentOfGroup:
NotQuantified = 0
Quantified = 1
UngreedyQuantified = 2
def __init__(self, parent: OpeningOfGroup, initial: bool=False):
self.parent = parent
self.is_initial = initial
self.limited_prev = parent if initial else self
self.quantified = ContentOfGroup.NotQuantified
# forward of function
self.add = self.parent.add
def next(self, psm: PSM):
quantified = self.quantified
self.quantified = ContentOfGroup.NotQuantified
if psm.char == ")":
if self.is_initial:
psm.error = "unbalanced parenthesis"
else:
return self.parent.parent
elif psm.char == "(":
return OpeningOfGroup(self.limited_prev)
elif psm.char == "^":
self.add(ast.MatchBegin())
return self.limited_prev
elif psm.char == "$":
self.add(ast.MatchEnd())
return self.limited_prev
elif psm.char == ".":
t = ast.PatternChar()
t.pattern = psm.char
self.add(t)
return self.limited_prev
elif psm.char == "\\":
return EscapedChar(self.limited_prev,
as_single_chars=SpecialPattern.special_chars)
elif psm.char == "[":
return CharClass(self.limited_prev)
elif psm.char == "|":
self.parent.g.collapse_alt()
return self.limited_prev
# >>> Quantifiers
elif psm.char == "?" and quantified == ContentOfGroup.NotQuantified:
self.quantified = ContentOfGroup.Quantified
last = self._last_or_fail(psm)
if last:
last.quantifier = ast.NoneOrOnce()
return self.limited_prev
elif psm.char == "*" and quantified == ContentOfGroup.NotQuantified:
self.quantified = ContentOfGroup.Quantified
last = self._last_or_fail(psm)
if last:
last.quantifier = ast.NoneOrMore()
return self.limited_prev
elif psm.char == "+" and quantified == ContentOfGroup.NotQuantified:
self.quantified = ContentOfGroup.Quantified
last = self._last_or_fail(psm)
if last:
last.quantifier = ast.OneOrMore()
return self.limited_prev
elif psm.char == "{" and quantified == ContentOfGroup.NotQuantified:
self.quantified = ContentOfGroup.Quantified
t = MinimumOfRepetition(self.limited_prev)
last = self._last_or_fail(psm)
if last:
last.quantifier = t.between
return t
elif psm.char == "?" and quantified == ContentOfGroup.Quantified:
self.quantified = ContentOfGroup.UngreedyQuantified
last = self._last_or_fail(psm)
if last:
last.quantifier.greedy = False
return self.limited_prev
elif quantified == ContentOfGroup.Quantified:
psm.error = "unexpected quantifier"
elif quantified == ContentOfGroup.UngreedyQuantified:
psm.error = "quantifier repeated"
# <<< Quantifier
else:
t = ast.SingleChar()
t.char = psm.char
self.add(t)
return self.limited_prev
def _last_or_fail(self, psm: PSM):
if self.parent.g.group.seq:
return self.parent.g.group.seq[-1]
else:
psm.error = "nothing to repeat"
class MinimumOfRepetition:
def __init__(self, parent: ContentOfGroup):
self.parent = parent
self.between = ast.Between()
self.min = []
def next(self, psm: PSM):
if psm.char.isdigit():
self.min.append(psm.char)
return self
elif psm.char == ",":
self._interpret()
return MaximumOfRepetition(self)
elif psm.char == "}":
self._interpret()
return self.parent
else:
psm.error = 'expected digit, "," or "}"'
def _interpret(self):
if not self.min:
return
try:
count = int("".join(self.min))
except ValueError:
assert False, "internal error: cannot convert to number minimum of repetition"
self.between.min = count
class MaximumOfRepetition:
def __init__(self, repeat: MinimumOfRepetition):
self.repeat = repeat
self.max = []
def next(self, psm: PSM):
if psm.char.isdigit():
self.max.append(psm.char)
return self
elif psm.char == "}":
self._interpret()
return self.repeat.parent
else:
psm.error = 'expected digit, "," or "}"'
def _interpret(self):
if not self.max:
return
try:
count = int("".join(self.max))
except ValueError:
assert False, "internal error: cannot convert to number maximum of repetition"
self.repeat.between.max = count
#--------------------------------------
# Escaping
class EscapedChar:
def __init__(self, prev, as_single_chars=(), as_pattern_chars=()):
self.prev = prev # ContentOfGroup or CharClass
self.single_chars = as_single_chars
self.pattern_chars = as_pattern_chars
def next(self, psm: PSM):
if psm.char in SpecialPattern.individual_chars \
or psm.char in SpecialPattern.range_chars \
or psm.char in self.pattern_chars:
t = ast.PatternChar()
t.pattern = psm.char
self.prev.add(t)
return self.prev
elif psm.char in self.single_chars:
t = ast.SingleChar()
t.char = psm.char
self.prev.add(t)
return self.prev
elif psm.char == "x":
return AsciiChar(self.prev)
elif psm.char == "u":
return UnicodeChar(self.prev)
else:
psm.error = "unauthorized escape of {}".format(psm.char)
class AsciiChar:
def __init__(self, prev):
self.prev = prev # ContentOfGroup or CharClass
self.pattern = ast.PatternChar()
self.pattern.type = ast.PatternChar.Ascii
self.prev.add(self.pattern)
def next(self, psm: PSM):
if psm.char in string.hexdigits:
self.pattern.pattern += psm.char
count = len(self.pattern.pattern)
return self.prev if count >= 2 else self
else:
psm.error = "expected ASCII hexadecimal character"
class UnicodeChar:
def __init__(self, prev):
self.prev = prev # ContentOfGroup or CharClass
self.pattern = ast.PatternChar()
self.pattern.type = ast.PatternChar.Unicode
self.prev.add(self.pattern)
def next(self, psm: PSM):
if psm.char in string.hexdigits:
self.pattern.pattern += psm.char
count = len(self.pattern.pattern)
return self.prev if count >= 4 else self
else:
psm.error = "expected ASCII hexadecimal character"
#-------------------------------------
# Character class
class WrappedCharClass:
def __init__(self):
# ast is CharClass or may be changed to PatternClass in one case
self.ast = ast.CharClass()
def add(self, other):
assert isinstance(self.ast, ast.CharClass)
self.ast.elems = self.ast.elems + (other,)
def pop(self):
assert isinstance(self.ast, ast.CharClass)
last = self.ast.elems[-1]
self.ast.elems = self.ast.elems[:-1]
return last
class CharClass:
def __init__(self, prev):
self.prev = prev # ContentOfGroup or CharClass
self.q = WrappedCharClass()
# forward function
self.add = self.q.add
self.next_is_range = False
self.empty = True
self.can_mutate = True
def next(self, psm: PSM):
this_should_be_range = self.next_is_range
self.next_is_range = False
this_is_empty = self.empty
self.empty = False
if psm.char == "\\":
self.can_mutate = False
self.next_is_range = this_should_be_range
return EscapedChar(self,
as_single_chars=SpecialPattern.restrict_special_chars)
elif this_should_be_range and psm.char != "]":
assert isinstance(self.q.ast, ast.CharClass)
assert len(self.q.ast.elems) >= 1
self.next_is_range = False
t = ast.Range()
t.begin = self.q.pop()
t.end = ast.SingleChar()
t.end.char = psm.char
self.q.add(t)
return self
elif psm.char == "^":
# if at the begining, it has a special meaning
if this_is_empty:
self.can_mutate = False
self.q.ast.negate = True
else:
t = ast.SingleChar()
t.char = psm.char
self.q.add(t)
return self
elif psm.char == "]":
if this_should_be_range:
t = ast.SingleChar()
t.char = "-"
self.q.add(t)
else:
self.mutate_if_posix_like()
self.prev.add(self.q.ast)
return self.prev
elif psm.char == "[":
return CharClass(self)
elif psm.char == "-" and len(self.q.ast.elems) >= 1:
self.next_is_range = True
return self
else:
t = ast.SingleChar()
t.char = psm.char
self.q.add(t)
return self
def mutate_if_posix_like(self):
"""
Change from character class to pattern char if the content is matching
POSIX-like classe.
"""
assert isinstance(self.q.ast, ast.CharClass)
# put in this variable everything that had happen but not saved into
# the single char object
# because mutation is only possible if the exact string of the content
# match a pre-definied list, so if an unlogged char is consumed, it
# must prevent mutation
if not self.can_mutate:
return
if len(self.q.ast.elems) < SpecialPattern.min_len_posix_class + 2:
return
opening = self.q.ast.elems[0]
if not isinstance(opening, ast.SingleChar) or opening.char != ":":
return
closing = self.q.ast.elems[-1]
if not isinstance(closing, ast.SingleChar) or closing.char != ":":
return
is_only_ascii = lambda x: (isinstance(x, ast.SingleChar)
and len(x.char) == 1
and x.char.isalpha())
class_may_be_a_word = not any(
not is_only_ascii(x) for x in self.q.ast.elems[1:-1])
if not class_may_be_a_word:
return
word = "".join(s.char for s in self.q.ast.elems[1:-1])
if word not in SpecialPattern.posix_classes:
return
t = ast.PatternChar()
t.pattern = word
t.type = ast.PatternChar.Posix
self.q.ast = t
#-------------------------------------
def parse(expr, **kw):
sm = PSM()
sm.source = Source(expr)
sm.starts_with(OpeningOfGroup(parent=None, initial=True))
sm.pre_action = kw.get("pre_action", None)
sm.post_action = kw.get("post_action", None)
sm.parse()
return sm.state.g.group
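# Hedged usage sketch (return shape assumed from OpeningOfGroup above):
#     root = parse(r"(?<year>[0-9]{4})")
#     # `root` is the initial ast.Group; nested elements hang off root.seq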
| VaysseB/id_generator | src/parser.py | Python | gpl-3.0 | 15,217 |
'''WARCAT: Web ARChive (WARC) Archiving Tool
Tool and library for handling Web ARChive (WARC) files.
'''
from .version import *
| chfoo/warcat | warcat/__init__.py | Python | gpl-3.0 | 130 |
import math
from PyQt5.QtCore import Qt, pyqtSlot
from PyQt5.QtGui import QCloseEvent
from PyQt5.QtWidgets import QDialog, QInputDialog
from urh import settings
from urh.models.FuzzingTableModel import FuzzingTableModel
from urh.signalprocessing.ProtocoLabel import ProtocolLabel
from urh.signalprocessing.ProtocolAnalyzerContainer import ProtocolAnalyzerContainer
from urh.ui.ui_fuzzing import Ui_FuzzingDialog
class FuzzingDialog(QDialog):
def __init__(self, protocol: ProtocolAnalyzerContainer, label_index: int, msg_index: int, proto_view: int,
parent=None):
super().__init__(parent)
self.ui = Ui_FuzzingDialog()
self.ui.setupUi(self)
self.setAttribute(Qt.WA_DeleteOnClose)
self.setWindowFlags(Qt.Window)
self.protocol = protocol
msg_index = msg_index if msg_index != -1 else 0
self.ui.spinBoxFuzzMessage.setValue(msg_index + 1)
self.ui.spinBoxFuzzMessage.setMinimum(1)
self.ui.spinBoxFuzzMessage.setMaximum(self.protocol.num_messages)
self.ui.comboBoxFuzzingLabel.addItems([l.name for l in self.message.message_type])
self.ui.comboBoxFuzzingLabel.setCurrentIndex(label_index)
self.proto_view = proto_view
self.fuzz_table_model = FuzzingTableModel(self.current_label, proto_view)
self.fuzz_table_model.remove_duplicates = self.ui.chkBRemoveDuplicates.isChecked()
self.ui.tblFuzzingValues.setModel(self.fuzz_table_model)
self.fuzz_table_model.update()
self.ui.spinBoxFuzzingStart.setValue(self.current_label_start + 1)
self.ui.spinBoxFuzzingEnd.setValue(self.current_label_end)
self.ui.spinBoxFuzzingStart.setMaximum(len(self.message_data))
self.ui.spinBoxFuzzingEnd.setMaximum(len(self.message_data))
self.update_message_data_string()
self.ui.tblFuzzingValues.resize_me()
self.create_connects()
self.restoreGeometry(settings.read("{}/geometry".format(self.__class__.__name__), type=bytes))
@property
def message(self):
return self.protocol.messages[int(self.ui.spinBoxFuzzMessage.value() - 1)]
@property
def current_label_index(self):
return self.ui.comboBoxFuzzingLabel.currentIndex()
@property
def current_label(self) -> ProtocolLabel:
if len(self.message.message_type) == 0:
return None
cur_label = self.message.message_type[self.current_label_index].get_copy()
self.message.message_type[self.current_label_index] = cur_label
cur_label.fuzz_values = [fv for fv in cur_label.fuzz_values if fv] # Remove empty strings
if len(cur_label.fuzz_values) == 0:
cur_label.fuzz_values.append(self.message.plain_bits_str[cur_label.start:cur_label.end])
return cur_label
@property
def current_label_start(self):
if self.current_label and self.message:
return self.message.get_label_range(self.current_label, self.proto_view, False)[0]
else:
return -1
@property
def current_label_end(self):
if self.current_label and self.message:
return self.message.get_label_range(self.current_label, self.proto_view, False)[1]
else:
return -1
@property
def message_data(self):
if self.proto_view == 0:
return self.message.plain_bits_str
elif self.proto_view == 1:
return self.message.plain_hex_str
elif self.proto_view == 2:
return self.message.plain_ascii_str
else:
return None
def create_connects(self):
self.ui.spinBoxFuzzingStart.valueChanged.connect(self.on_fuzzing_start_changed)
self.ui.spinBoxFuzzingEnd.valueChanged.connect(self.on_fuzzing_end_changed)
self.ui.comboBoxFuzzingLabel.currentIndexChanged.connect(self.on_combo_box_fuzzing_label_current_index_changed)
self.ui.btnRepeatValues.clicked.connect(self.on_btn_repeat_values_clicked)
self.ui.btnAddRow.clicked.connect(self.on_btn_add_row_clicked)
self.ui.btnDelRow.clicked.connect(self.on_btn_del_row_clicked)
self.ui.tblFuzzingValues.deletion_wanted.connect(self.delete_lines)
self.ui.chkBRemoveDuplicates.stateChanged.connect(self.on_remove_duplicates_state_changed)
self.ui.sBAddRangeStart.valueChanged.connect(self.on_fuzzing_range_start_changed)
self.ui.sBAddRangeEnd.valueChanged.connect(self.on_fuzzing_range_end_changed)
self.ui.checkBoxLowerBound.stateChanged.connect(self.on_lower_bound_checked_changed)
self.ui.checkBoxUpperBound.stateChanged.connect(self.on_upper_bound_checked_changed)
self.ui.spinBoxLowerBound.valueChanged.connect(self.on_lower_bound_changed)
self.ui.spinBoxUpperBound.valueChanged.connect(self.on_upper_bound_changed)
self.ui.spinBoxRandomMinimum.valueChanged.connect(self.on_random_range_min_changed)
self.ui.spinBoxRandomMaximum.valueChanged.connect(self.on_random_range_max_changed)
self.ui.spinBoxFuzzMessage.valueChanged.connect(self.on_fuzz_msg_changed)
self.ui.btnAddFuzzingValues.clicked.connect(self.on_btn_add_fuzzing_values_clicked)
self.ui.comboBoxFuzzingLabel.editTextChanged.connect(self.set_current_label_name)
def update_message_data_string(self):
fuz_start = self.current_label_start
fuz_end = self.current_label_end
num_proto_bits = 10
num_fuz_bits = 16
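        # Preview shows at most num_proto_bits of context on each side of the
        # label and truncates the fuzzed range itself to num_fuz_bits.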
proto_start = fuz_start - num_proto_bits
preambel = "... "
if proto_start <= 0:
proto_start = 0
preambel = ""
proto_end = fuz_end + num_proto_bits
postambel = " ..."
if proto_end >= len(self.message_data) - 1:
proto_end = len(self.message_data) - 1
postambel = ""
fuzamble = ""
if fuz_end - fuz_start > num_fuz_bits:
fuz_end = fuz_start + num_fuz_bits
fuzamble = "..."
self.ui.lPreBits.setText(preambel + self.message_data[proto_start:self.current_label_start])
self.ui.lFuzzedBits.setText(self.message_data[fuz_start:fuz_end] + fuzamble)
self.ui.lPostBits.setText(self.message_data[self.current_label_end:proto_end] + postambel)
self.set_add_spinboxes_maximum_on_label_change()
def closeEvent(self, event: QCloseEvent):
settings.write("{}/geometry".format(self.__class__.__name__), self.saveGeometry())
super().closeEvent(event)
@pyqtSlot(int)
def on_fuzzing_start_changed(self, value: int):
self.ui.spinBoxFuzzingEnd.setMinimum(self.ui.spinBoxFuzzingStart.value())
new_start = self.message.convert_index(value - 1, self.proto_view, 0, False)[0]
self.current_label.start = new_start
self.current_label.fuzz_values[:] = []
self.update_message_data_string()
self.fuzz_table_model.update()
self.ui.tblFuzzingValues.resize_me()
@pyqtSlot(int)
def on_fuzzing_end_changed(self, value: int):
self.ui.spinBoxFuzzingStart.setMaximum(self.ui.spinBoxFuzzingEnd.value())
new_end = self.message.convert_index(value - 1, self.proto_view, 0, False)[1] + 1
self.current_label.end = new_end
self.current_label.fuzz_values[:] = []
self.update_message_data_string()
self.fuzz_table_model.update()
self.ui.tblFuzzingValues.resize_me()
@pyqtSlot(int)
def on_combo_box_fuzzing_label_current_index_changed(self, index: int):
self.fuzz_table_model.fuzzing_label = self.current_label
self.fuzz_table_model.update()
self.update_message_data_string()
self.ui.tblFuzzingValues.resize_me()
self.ui.spinBoxFuzzingStart.blockSignals(True)
self.ui.spinBoxFuzzingStart.setValue(self.current_label_start + 1)
self.ui.spinBoxFuzzingStart.blockSignals(False)
self.ui.spinBoxFuzzingEnd.blockSignals(True)
self.ui.spinBoxFuzzingEnd.setValue(self.current_label_end)
self.ui.spinBoxFuzzingEnd.blockSignals(False)
@pyqtSlot()
def on_btn_add_row_clicked(self):
self.current_label.add_fuzz_value()
self.fuzz_table_model.update()
@pyqtSlot()
def on_btn_del_row_clicked(self):
min_row, max_row, _, _ = self.ui.tblFuzzingValues.selection_range()
self.delete_lines(min_row, max_row)
@pyqtSlot(int, int)
def delete_lines(self, min_row, max_row):
if min_row == -1:
self.current_label.fuzz_values = self.current_label.fuzz_values[:-1]
else:
self.current_label.fuzz_values = self.current_label.fuzz_values[:min_row] + self.current_label.fuzz_values[
max_row + 1:]
_ = self.current_label # if user deleted all, this will restore a fuzz value
self.fuzz_table_model.update()
@pyqtSlot()
def on_remove_duplicates_state_changed(self):
self.fuzz_table_model.remove_duplicates = self.ui.chkBRemoveDuplicates.isChecked()
self.fuzz_table_model.update()
self.remove_duplicates()
@pyqtSlot()
def set_add_spinboxes_maximum_on_label_change(self):
nbits = self.current_label.end - self.current_label.start # Use Bit Start/End for maximum calc.
if nbits >= 32:
nbits = 31
max_val = 2 ** nbits - 1
self.ui.sBAddRangeStart.setMaximum(max_val - 1)
self.ui.sBAddRangeEnd.setMaximum(max_val)
self.ui.sBAddRangeEnd.setValue(max_val)
self.ui.sBAddRangeStep.setMaximum(max_val)
self.ui.spinBoxLowerBound.setMaximum(max_val - 1)
self.ui.spinBoxUpperBound.setMaximum(max_val)
self.ui.spinBoxUpperBound.setValue(max_val)
self.ui.spinBoxBoundaryNumber.setMaximum(int(max_val / 2) + 1)
self.ui.spinBoxRandomMinimum.setMaximum(max_val - 1)
self.ui.spinBoxRandomMaximum.setMaximum(max_val)
self.ui.spinBoxRandomMaximum.setValue(max_val)
@pyqtSlot(int)
def on_fuzzing_range_start_changed(self, value: int):
self.ui.sBAddRangeEnd.setMinimum(value)
self.ui.sBAddRangeStep.setMaximum(self.ui.sBAddRangeEnd.value() - value)
@pyqtSlot(int)
def on_fuzzing_range_end_changed(self, value: int):
self.ui.sBAddRangeStart.setMaximum(value - 1)
self.ui.sBAddRangeStep.setMaximum(value - self.ui.sBAddRangeStart.value())
@pyqtSlot()
def on_lower_bound_checked_changed(self):
if self.ui.checkBoxLowerBound.isChecked():
self.ui.spinBoxLowerBound.setEnabled(True)
self.ui.spinBoxBoundaryNumber.setEnabled(True)
elif not self.ui.checkBoxUpperBound.isChecked():
self.ui.spinBoxLowerBound.setEnabled(False)
self.ui.spinBoxBoundaryNumber.setEnabled(False)
else:
self.ui.spinBoxLowerBound.setEnabled(False)
@pyqtSlot()
def on_upper_bound_checked_changed(self):
if self.ui.checkBoxUpperBound.isChecked():
self.ui.spinBoxUpperBound.setEnabled(True)
self.ui.spinBoxBoundaryNumber.setEnabled(True)
elif not self.ui.checkBoxLowerBound.isChecked():
self.ui.spinBoxUpperBound.setEnabled(False)
self.ui.spinBoxBoundaryNumber.setEnabled(False)
else:
self.ui.spinBoxUpperBound.setEnabled(False)
@pyqtSlot()
def on_lower_bound_changed(self):
self.ui.spinBoxUpperBound.setMinimum(self.ui.spinBoxLowerBound.value())
self.ui.spinBoxBoundaryNumber.setMaximum(math.ceil((self.ui.spinBoxUpperBound.value()
- self.ui.spinBoxLowerBound.value()) / 2))
@pyqtSlot()
def on_upper_bound_changed(self):
self.ui.spinBoxLowerBound.setMaximum(self.ui.spinBoxUpperBound.value() - 1)
self.ui.spinBoxBoundaryNumber.setMaximum(math.ceil((self.ui.spinBoxUpperBound.value()
- self.ui.spinBoxLowerBound.value()) / 2))
@pyqtSlot()
def on_random_range_min_changed(self):
self.ui.spinBoxRandomMaximum.setMinimum(self.ui.spinBoxRandomMinimum.value())
@pyqtSlot()
def on_random_range_max_changed(self):
self.ui.spinBoxRandomMinimum.setMaximum(self.ui.spinBoxRandomMaximum.value() - 1)
@pyqtSlot()
def on_btn_add_fuzzing_values_clicked(self):
if self.ui.comboBoxStrategy.currentIndex() == 0:
self.__add_fuzzing_range()
elif self.ui.comboBoxStrategy.currentIndex() == 1:
self.__add_fuzzing_boundaries()
elif self.ui.comboBoxStrategy.currentIndex() == 2:
self.__add_random_fuzzing_values()
def __add_fuzzing_range(self):
start = self.ui.sBAddRangeStart.value()
end = self.ui.sBAddRangeEnd.value()
step = self.ui.sBAddRangeStep.value()
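        # The spin box end value is inclusive, so end + 1 is passed as the
        # (presumably exclusive) upper bound of add_range.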
self.fuzz_table_model.add_range(start, end + 1, step)
def __add_fuzzing_boundaries(self):
lower_bound = -1
if self.ui.spinBoxLowerBound.isEnabled():
lower_bound = self.ui.spinBoxLowerBound.value()
upper_bound = -1
if self.ui.spinBoxUpperBound.isEnabled():
upper_bound = self.ui.spinBoxUpperBound.value()
num_vals = self.ui.spinBoxBoundaryNumber.value()
self.fuzz_table_model.add_boundaries(lower_bound, upper_bound, num_vals)
def __add_random_fuzzing_values(self):
n = self.ui.spinBoxNumberRandom.value()
minimum = self.ui.spinBoxRandomMinimum.value()
maximum = self.ui.spinBoxRandomMaximum.value()
self.fuzz_table_model.add_random(n, minimum, maximum)
def remove_duplicates(self):
if self.ui.chkBRemoveDuplicates.isChecked():
for lbl in self.message.message_type:
seq = lbl.fuzz_values[:]
seen = set()
add_seen = seen.add
lbl.fuzz_values = [l for l in seq if not (l in seen or add_seen(l))]
@pyqtSlot()
def set_current_label_name(self):
self.current_label.name = self.ui.comboBoxFuzzingLabel.currentText()
self.ui.comboBoxFuzzingLabel.setItemText(self.ui.comboBoxFuzzingLabel.currentIndex(), self.current_label.name)
@pyqtSlot(int)
def on_fuzz_msg_changed(self, index: int):
self.ui.comboBoxFuzzingLabel.setDisabled(False)
sel_label_ind = self.ui.comboBoxFuzzingLabel.currentIndex()
self.ui.comboBoxFuzzingLabel.blockSignals(True)
self.ui.comboBoxFuzzingLabel.clear()
if len(self.message.message_type) == 0:
self.ui.comboBoxFuzzingLabel.setDisabled(True)
return
self.ui.comboBoxFuzzingLabel.addItems([lbl.name for lbl in self.message.message_type])
self.ui.comboBoxFuzzingLabel.blockSignals(False)
if sel_label_ind < self.ui.comboBoxFuzzingLabel.count():
self.ui.comboBoxFuzzingLabel.setCurrentIndex(sel_label_ind)
else:
self.ui.comboBoxFuzzingLabel.setCurrentIndex(0)
self.fuzz_table_model.fuzzing_label = self.current_label
self.fuzz_table_model.update()
self.update_message_data_string()
@pyqtSlot()
def on_btn_repeat_values_clicked(self):
num_repeats, ok = QInputDialog.getInt(self, self.tr("How many times shall values be repeated?"),
self.tr("Number of repeats:"), 1, 1)
if ok:
self.ui.chkBRemoveDuplicates.setChecked(False)
min_row, max_row, _, _ = self.ui.tblFuzzingValues.selection_range()
if min_row == -1:
start, end = 0, len(self.current_label.fuzz_values)
else:
start, end = min_row, max_row + 1
self.fuzz_table_model.repeat_fuzzing_values(start, end, num_repeats)
| jopohl/urh | src/urh/controller/dialogs/FuzzingDialog.py | Python | gpl-3.0 | 15,833 |
import os
import unittest
from vsg.rules import iteration_scheme
from vsg import vhdlFile
from vsg.tests import utils
sTestDir = os.path.dirname(__file__)
lFile, eError = vhdlFile.utils.read_vhdlfile(os.path.join(sTestDir, 'rule_300_test_input.vhd'))
dIndentMap = utils.read_indent_file()
lExpected = []
lExpected.append('')
utils.read_file(os.path.join(sTestDir, 'rule_300_test_input.fixed.vhd'), lExpected)
class test_iteration_scheme_rule(unittest.TestCase):
def setUp(self):
self.oFile = vhdlFile.vhdlFile(lFile)
self.assertIsNone(eError)
self.oFile.set_indent_map(dIndentMap)
def test_rule_300(self):
oRule = iteration_scheme.rule_300()
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'iteration_scheme')
self.assertEqual(oRule.identifier, '300')
lExpected = [13, 17]
oRule.analyze(self.oFile)
self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
def test_fix_rule_300(self):
oRule = iteration_scheme.rule_300()
oRule.fix(self.oFile)
lActual = self.oFile.get_lines()
self.assertEqual(lExpected, lActual)
oRule.analyze(self.oFile)
self.assertEqual(oRule.violations, [])
| jeremiah-c-leary/vhdl-style-guide | vsg/tests/iteration_scheme/test_rule_300.py | Python | gpl-3.0 | 1,279 |
#
# LMirror is Copyright (C) 2010 Robert Collins <robertc@robertcollins.net>
#
# LMirror is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
#
# In the LMirror source tree the file COPYING.txt contains the GNU General Public
# License version 3.
#
"""Tests for logging support code."""
from StringIO import StringIO
import logging
import os.path
import time
from l_mirror import logging_support
from l_mirror.tests import ResourcedTestCase
from l_mirror.tests.logging_resource import LoggingResourceManager
from l_mirror.tests.stubpackage import TempDirResource
class TestLoggingSetup(ResourcedTestCase):
resources = [('logging', LoggingResourceManager())]
def test_configure_logging_sets_converter(self):
out = StringIO()
c_log, f_log, formatter = logging_support.configure_logging(out)
self.assertEqual(c_log, logging.root.handlers[0])
self.assertEqual(f_log, logging.root.handlers[1])
self.assertEqual(None, c_log.formatter)
self.assertEqual(formatter, f_log.formatter)
self.assertEqual(time.gmtime, formatter.converter)
self.assertEqual("%Y-%m-%d %H:%M:%SZ", formatter.datefmt)
self.assertEqual(logging.StreamHandler, c_log.__class__)
self.assertEqual(out, c_log.stream)
self.assertEqual(logging.FileHandler, f_log.__class__)
self.assertEqual(os.path.expanduser("~/.cache/lmirror/log"), f_log.baseFilename)
def test_can_supply_filename_None(self):
out = StringIO()
c_log, f_log, formatter = logging_support.configure_logging(out, None)
self.assertEqual(None, f_log)
| rbtcollins/lmirror | l_mirror/tests/test_logging_support.py | Python | gpl-3.0 | 2,180 |
# -*- coding: utf8 -*-
###########################################################################
# This is the package latexparser
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
###########################################################################
# copyright (c) Laurent Claessens, 2010,2012-2016
# email: laurent@claessens-donadello.eu
import codecs
from latexparser.InputPaths import InputPaths
class Occurrence(object):
"""
self.as_written : the code as it appears in the file, including \MyMacro, including the backslash.
self.position : the position at which this occurrence appears.
Example, if we look at the LatexCode
Hello word, \MyMacro{first}
and then \MyMacro{second}
the first occurrence of \MyMacro has position=12
"""
def __init__(self,name,arguments,as_written="",position=0):
self.arguments = arguments
self.number_of_arguments = len(arguments)
self.name = name
self.as_written = as_written
self.arguments_list = arguments
self.position = position
def configuration(self):
r"""
Return the way the arguments are separated in as_written.
Example, if we have
\MyMacro<space>{A}<tab>{B}
{C},
we return the list
["<space>","tab","\n"]
The following has to be true:
self.as_written == self.name+self.configuration()[0]+self.arguments_list[0]+etc.
"""
l=[]
a = self.as_written.split(self.name)[1]
for arg in self.arguments_list:
split = a.split("{"+arg+"}")
separator=split[0]
try:
a=split[1]
except IndexError:
print(self.as_written)
raise
l.append(separator)
return l
def change_argument(self,num,func):
r"""
        Apply the function <func> to the <num>-th argument of self and return a new Occurrence.
"""
n=num-1 # Internally, the arguments are numbered from 0.
arguments=self.arguments_list
configuration=self.configuration()
arguments[n]=func(arguments[n])
new_text=self.name
if len(arguments) != len(configuration):
print("Error : length of the configuration list has to be the same as the number of arguments")
raise ValueError
for i in range(len(arguments)):
new_text=new_text+configuration[i]+"{"+arguments[i]+"}"
return Occurrence(self.name,arguments,new_text,self.position)
def analyse(self):
return globals()["Occurrence_"+self.name[1:]](self) # We have to remove the initial "\" in the name of the macro.
def __getitem__(self,a):
return self.arguments[a]
def __str__(self):
return self.as_written
class Occurrence_newlabel(object):
r"""
takes an occurrence of \newlabel and creates an object which contains the information.
In the self.section_name we remove "\relax" from the string.
"""
def __init__(self,occurrence):
self.occurrence = occurrence
self.arguments = self.occurrence.arguments
if len(self.arguments) == 0 :
self.name = "Non interesting; probably the definition"
self.listoche = [None,None,None,None,None]
self.value,self.page,self.section_name,self.fourth,self.fifth=(None,None,None,None,None)
else :
self.name = self.arguments[0][0]
self.listoche = [a[0] for a in SearchArguments(self.arguments[1][0],5)[0]]
self.value = self.listoche[0]
self.page = self.listoche[1]
self.section_name = self.listoche[2].replace(r"\relax","")
self.fourth = self.listoche[3] # I don't know the role of the fourth argument of \newlabel
self.fifth = self.listoche[4] # I don't know the role of the fifth argument of \newlabel
class Occurrence_addInputPath(object):
def __init__(self,Occurrence):
self.directory=Occurrence[0]
class Occurrence_cite(object):
def __init__(self,occurrence):
self.label = occurrence[0]
def entry(self,codeBibtex):
return codeBibtex[self.label]
class Occurrence_newcommand(object):
def __init__(self,occurrence):
self.occurrence = occurrence
self.number_of_arguments = 0
if self.occurrence[1][1] == "[]":
self.number_of_arguments = self.occurrence[1][0]
self.name = self.occurrence[0][0]#[0]
self.definition = self.occurrence[-1][0]
class Occurrence_label(object):
def __init__(self,occurrence):
self.occurrence=occurrence
self.label=self.occurrence.arguments[0]
class Occurrence_ref(object):
def __init__(self,occurrence):
self.occurrence=occurrence
self.label=self.occurrence.arguments[0]
class Occurrence_eqref(object):
def __init__(self,occurrence):
self.occurrence=occurrence
self.label=self.occurrence.arguments[0]
class Occurrence_input(Occurrence):
def __init__(self,occurrence):
Occurrence.__init__(self,occurrence.name,occurrence.arguments,as_written=occurrence.as_written,position=occurrence.position)
self.occurrence = occurrence
self.filename = self.occurrence[0]
self.input_paths=InputPaths()
self._file_content=None # Make file_content "lazy"
def file_content(self,input_paths=None):
r"""
return the content of the file corresponding to this occurrence of
\input.
This is not recursive.
- 'input_path' is the list of paths in which we can search for files.
See the macro `\addInputPath` in the file
https://github.com/LaurentClaessens/mazhe/blob/master/configuration.tex
"""
import os.path
# Memoize
if self._file_content is not None :
return self._file_content
# At least, we are searching in the current directory :
if input_paths is None :
raise # Just to know who should do something like that
# Creating the filename
filename=self.filename
strict_filename = filename
if "." not in filename:
strict_filename=filename+".tex"
# Searching for the correct file in the subdirectories
fn=input_paths.get_file(strict_filename)
try:
# Without [:-1] I got an artificial empty line at the end.
text = "".join( codecs.open(fn,"r",encoding="utf8") )[:-1]
except IOError :
print("Warning : file %s not found."%strict_filename)
raise
self._file_content=text
return self._file_content
| LaurentClaessens/LaTeXparser | Occurrence.py | Python | gpl-3.0 | 7,331 |
#!/usr/bin/env python
#
# MCP320x
#
# Author: Maurik Holtrop
#
# This module interfaces with the MCP300x or MCP320x family of chips. These
# are 10-bit and 12-bit ADCs respectively. The x number indicates the number
# of multiplexed analog inputs: 2 (MCP3202), 4 (MCP3204) or 8 (MCP3208)
# Communications with this chip are over the SPI protocol.
# See: https://en.wikipedia.org/wiki/Serial_Peripheral_Interface_Bus
#
# The version of the code has two SPI interfaces: the builtin hardware
# SPI interface on the RPI, or a "bit-banged" GPIO version.
#
# Bit-Bang GPIO:
# We emulate a SPI port in software using the GPIO lines.
# This is a bit slower than the hardware interface, but it is far more
# clear what is going on, plus the RPi has only one SPI device.
# Connections: RPi GPIO to MCP320x
# CS_bar_pin = CS/SHDN
# CLK_pin = CLK
# MOSI_pin = D_in
# MISO_pin = D_out
#
# Hardware SPI:
# This uses the builtin hardware on the RPi. You need to enable this with the
# raspi-config program first. The data rate can be up to 1MHz.
# Connections: RPi pins to MCP320x
# CE0 or CE1 = CS/SHDN (chip select) set CS_bar = 0 or 1
# SCK = CLK set CLK_pin = 1000000 (transfer speed)
# MOSI = D_in set MOSI_pin = 0
# MISO = D_out set MISO_pin = 0
# The SPI protocol simulated here is MODE=0, CPHA=0, which has a positive polarity clock,
# (the clock is 0 at rest, active at 1) and a positive phase (0 to 1 transition) for reading
# or writing the data. Thus corresponds to the specifications of the MCP320x chips.
#
# From MCP3208 datasheet:
# Outgoing data: the MCU latches data to the A/D converter on rising edges of SCLK.
# Incoming data: data is clocked out of the A/D converter on falling edges, so it should be read on the rising edge.
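#
# Usage sketch (illustrative only; the wiring above is assumed, adjust to your setup):
#   adc = MCP320x(cs_bar_pin=0)              # hardware SPI on CE0 at 1 MHz
#   raw = adc.read_adc(0)                    # 12-bit value, 0..4095 on an MCP3208
#   volts = raw * adc.vref / adc.get_value_max()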
try:
import RPi.GPIO as GPIO
except ImportError as error:
pass
try:
import Adafruit_BBIO as GPIO
except ImportError as error:
pass
try:
import spidev
except ImportError as error:
pass
from DevLib.MyValues import MyValues
class MCP320x:
"""This is an class that implements an interface to the MCP320x ADC chips.
Standard is the MCP3208, but is will also work wiht the MCP3202, MCP3204, MCP3002, MCP3004 and MCP3008."""
def __init__(self, cs_bar_pin, clk_pin=1000000, mosi_pin=0, miso_pin=0, chip='MCP3208',
channel_max=None, bit_length=None, single_ended=True):
"""Initialize the code and set the GPIO pins.
The last argument, ch_max, is 2 for the MCP3202, 4 for the
MCP3204 or 8 for the MCS3208."""
self._CLK = clk_pin
self._MOSI = mosi_pin
self._MISO = miso_pin
self._CS_bar = cs_bar_pin
chip_dictionary = {
"MCP3202": (2, 12),
"MCP3204": (4, 12),
"MCP3208": (8, 12),
"MCP3002": (2, 10),
"MCP3004": (4, 10),
"MCP3008": (8, 10)
}
if chip in chip_dictionary:
self._ChannelMax = chip_dictionary[chip][0]
self._BitLength = chip_dictionary[chip][1]
elif chip is None and (channel_max is not None) and (bit_length is not None):
self._ChannelMax = channel_max
self._BitLength = bit_length
else:
print("Unknown chip: {} - Please re-initialize.")
self._ChannelMax = 0
self._BitLength = 0
return
self._SingleEnded = single_ended
self._Vref = 3.3
self._values = MyValues(self.read_adc, self._ChannelMax)
self._volts = MyValues(self.read_volts, self._ChannelMax)
# This is used to speed up the SPIDEV communication. Send out MSB first.
# control[0] - bit7-3: upper 5 bits 0, because we can only send 8 bit sequences.
# - bit2 : Start bit - starts conversion in ADCs
# - bit1 : Select single_ended=1 or differential=0
# - bit0 : D2 high bit of channel select.
# control[1] - bit7 : D1 middle bit of channel select.
# - bit6 : D0 low bit of channel select.
# - bit5-0 : Don't care.
if self._SingleEnded:
self._control0 = [0b00000110, 0b00100000, 0] # Pre-compute part of the control word.
else:
self._control0 = [0b00000100, 0b00100000, 0] # Pre-compute part of the control word.
        if self._MOSI > 0:  # Bit-bang mode
            assert self._MISO != 0 and self._CLK < 32  # CLK must be a GPIO pin number here, not a frequency
if GPIO.getmode() != 11:
GPIO.setmode(GPIO.BCM) # Use the BCM numbering scheme
GPIO.setup(self._CLK, GPIO.OUT) # Setup the ports for in and output
GPIO.setup(self._MOSI, GPIO.OUT)
GPIO.setup(self._MISO, GPIO.IN)
GPIO.setup(self._CS_bar, GPIO.OUT)
GPIO.output(self._CLK, 0) # Set the clock low.
GPIO.output(self._MOSI, 0) # Set the Master Out low
GPIO.output(self._CS_bar, 1) # Set the CS_bar high
else:
self._dev = spidev.SpiDev(0, self._CS_bar) # Start a SpiDev device
self._dev.mode = 0 # Set SPI mode (phase)
self._dev.max_speed_hz = self._CLK # Set the data rate
self._dev.bits_per_word = 8 # Number of bit per word. ALWAYS 8
def __del__(self):
""" Cleanup the GPIO before being destroyed """
if self._MOSI > 0:
GPIO.cleanup(self._CS_bar)
GPIO.cleanup(self._CLK)
GPIO.cleanup(self._MOSI)
GPIO.cleanup(self._MISO)
def get_channel_max(self):
"""Return the maximum number of channels"""
return self._ChannelMax
def get_bit_length(self):
"""Return the number of bits that will be read"""
return self._BitLength
def get_value_max(self):
"""Return the maximum value possible for an ADC read"""
return 2 ** self._BitLength - 1
def send_bit(self, bit):
""" Send out a single bit, and pulse clock."""
if self._MOSI == 0:
return
#
# The input is read on the rising edge of the clock.
#
GPIO.output(self._MOSI, bit) # Set the bit.
GPIO.output(self._CLK, 1) # Rising edge sends data
GPIO.output(self._CLK, 0) # Return clock to zero.
def read_bit(self):
""" Read a single bit from the ADC and pulse clock."""
if self._MOSI == 0:
return 0
#
# The output is going out on the falling edge of the clock,
# and is to be read on the rising edge of the clock.
# Clock should be already low, and data should already be set.
GPIO.output(self._CLK, 1) # Set the clock high. Ready to read.
bit = GPIO.input(self._MISO) # Read the bit.
GPIO.output(self._CLK, 0) # Return clock low, next bit will be set.
return bit
def read_adc(self, channel):
"""This reads the actual ADC value, after connecting the analog multiplexer to
the desired channel.
        The ADC value is returned as an n-bit integer, with n=10 or 12 depending on the chip.
The value can be converted to a voltage with:
volts = data*Vref/(2**n-1)"""
if channel < 0 or channel >= self._ChannelMax:
print("Error - chip does not have channel = {}".format(channel))
if self._MOSI == 0:
# SPIdev Code
# This builds up the control word, which selects the channel
            # and sets single/differential mode.
control = [self._control0[0] + ((channel & 0b100) >> 2), self._control0[1]+((channel & 0b011) << 6), 0]
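            # e.g. single-ended channel 5 (0b101): control = [0b00000111, 0b01100000, 0]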
dat = self._dev.xfer(control)
value = (dat[1] << 8)+dat[2] # Unpack the two 8-bit words to a single integer.
return value
else:
# Bit Bang code.
# To read out this chip you need to send:
# 1 - start bit
# 2 - Single ended (1) or differential (0) mode
# 3 - Channel select: 1 bit for x=2 or 3 bits for x=4,8
# 4 - MSB first (1) or LSB first (0)
#
# Start of sequence sets CS_bar low, and sends sequence
#
GPIO.output(self._CLK, 0) # Make sure clock starts low.
GPIO.output(self._MOSI, 0)
GPIO.output(self._CS_bar, 0) # Select the chip.
self.send_bit(1) # Start bit = 1
self.send_bit(self._SingleEnded) # Select single or differential
if self._ChannelMax > 2:
self.send_bit(int((channel & 0b100) > 0)) # Send high bit of channel = DS2
self.send_bit(int((channel & 0b010) > 0)) # Send mid bit of channel = DS1
self.send_bit(int((channel & 0b001) > 0)) # Send low bit of channel = DS0
else:
self.send_bit(channel)
self.send_bit(0) # MSB First (for MCP3x02) or don't care.
# The clock is currently low, and the dummy bit = 0 is on the output of the ADC
#
self.read_bit() # Read the bit.
data = 0
for i in range(self._BitLength):
# Note you need to shift left first, or else you shift the last bit (bit 0)
# to the 1 position.
data <<= 1
bit = self.read_bit()
data += bit
GPIO.output(self._CS_bar, 1) # Unselect the chip.
return data
def read_volts(self, channel):
"""Read the ADC value from channel and convert to volts, assuming that Vref is set correctly. """
return self._Vref * self.read_adc(channel) / self.get_value_max()
def fast_read_adc0(self):
"""This reads the actual ADC value of channel 0, with as little overhead as possible.
Use with SPIDEV ONLY!!!!
returns: The ADC value as an n-bit integer value, with n=10 or 12 depending on the chip."""
dat = self._dev.xfer(self._control0)
value = (dat[1] << 8) + dat[2]
return value
@property
def values(self):
"""ADC values presented as a list."""
return self._values
@property
def volts(self):
"""ADC voltages presented as a list"""
return self._volts
@property
def accuracy(self):
"""The fractional voltage of the least significant bit. """
return self._Vref / float(self.get_value_max())
@property
def vref(self):
"""Reference voltage used by the chip. You need to set this. It defaults to 3.3V"""
return self._Vref
@vref.setter
def vref(self, vr):
self._Vref = vr
def main(argv):
"""Test code for the MCP320x driver. This assumes you are using a MCP3208
If no arguments are supplied, then use SPIdev for CE0 and read channel 0"""
if len(argv) < 3:
print("Args : ", argv)
cs_bar = 0
clk_pin = 1000000
mosi_pin = 0
miso_pin = 0
if len(argv) < 2:
channel = 0
else:
channel = int(argv[1])
elif len(argv) < 6:
print("Please supply: cs_bar_pin clk_pin mosi_pin miso_pin channel")
sys.exit(1)
else:
cs_bar = int(argv[1])
clk_pin = int(argv[2])
mosi_pin = int(argv[3])
miso_pin = int(argv[4])
channel = int(argv[5])
adc_chip = MCP320x(cs_bar, clk_pin, mosi_pin, miso_pin)
try:
while True:
value = adc_chip.read_adc(channel)
print("{:4d}".format(value))
time.sleep(0.1)
except KeyboardInterrupt:
sys.exit(0)
if __name__ == '__main__':
import sys
import time
main(sys.argv)
| mholtrop/Phys605 | Python/DevLib/MCP320x.py | Python | gpl-3.0 | 11,971 |
# __init__.py
# Copyright (C) 2006, 2007, 2008, 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__version__ = '0.3.4'
| codendev/rapidwsgi | src/mako/__init__.py | Python | gpl-3.0 | 256 |
#!/usr/bin/python
import sys
print "divsum_analysis.py DivsumFile NumberOfNucleotides"
try:
file = sys.argv[1]
except:
file = raw_input("Introduce RepeatMasker's Divsum file: ")
try:
nucs = sys.argv[2]
except:
nucs = raw_input("Introduce number of analysed nucleotides: ")
nucs = int(nucs)
data = open(file).readlines()
s_matrix = data.index("Coverage for each repeat class and divergence (Kimura)\n")
matrix = []
elements = data[s_matrix+1]
elements = elements.split()
for element in elements[1:]:
matrix.append([element,[]])
n_el = len(matrix)
for line in data[s_matrix+2:]:
# print line
info = line.split()
info = info[1:]
for n in range(0,n_el):
matrix[n][1].append(int(info[n]))
abs = open(file+".abs", "w")
rel = open(file+".rel", "w")
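# .abs gets the total matched nucleotides per repeat class; .rel expresses that
# total as a fraction of the analysed nucleotides.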
for n in range(0,n_el):
abs.write("%s\t%s\n" % (matrix[n][0], sum(matrix[n][1])))
rel.write("%s\t%s\n" % (matrix[n][0], round(1.0*sum(matrix[n][1])/nucs,100)))
| fjruizruano/ngs-protocols | divsum_analysis.py | Python | gpl-3.0 | 974 |
"""
Copyright 2014 Jason Heeris, jason.heeris@gmail.com
This file is part of the dungeon excavator web interface ("webcavate").
Webcavate is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
Webcavate is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
webcavate. If not, see <http://www.gnu.org/licenses/>.
"""
import argparse
import uuid
from flask import Flask, render_template, request, make_response, redirect, url_for, flash
from dungeon.excavate import render_room
HELP_TEXT = """\
Web interface to the dungeon excavator."""
app = Flask('dungeon.web')
app.secret_key = str(uuid.uuid4())
@app.route("/")
def root():
""" Web interface landing page. """
return render_template('index.html')
@app.route("/error")
def error():
""" Display errors. """
return render_template('error.html')
def make_map(request, format):
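    """ Render the uploaded floor, wall and floorplan images into a map.

    Tile size and the three image files come straight from the submitted form.
    """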
tile_size = int(request.form['size'])
wall_file = request.files['walls']
floor_file = request.files['floor']
floorplan_file = request.files['floorplan']
try:
room_data, content_type = render_room(
floor_file.read(),
wall_file.read(),
floorplan_file.read(),
tile_size,
format
)
except ValueError as ve:
flash(str(ve))
return redirect(url_for('error'))
# Create response
response = make_response(room_data)
response.headers['Content-Type'] = content_type
return response
@app.route("/map.svg", methods=['POST'])
def map_svg():
return make_map(request, format='svg')
@app.route("/map.png", methods=['POST'])
def map_png():
return make_map(request, format='png')
@app.route("/map.jpg", methods=['POST'])
def map_jpg():
return make_map(request, format='jpg')
@app.route("/map", methods=['POST'])
def process():
""" Process submitted form data. """
format = request.form['format']
try:
node = {
'png': 'map_png',
'svg': 'map_svg',
'jpg': 'map_jpg',
}[format]
except KeyError:
flash("The output format you selected is not supported.")
return redirect(url_for('error'))
else:
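        # A 307 redirect preserves the request method and body, so the POST data survives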
return redirect(url_for(node, _method='POST'), code=307)
def main():
""" Parse arguments and get things going for the web interface """
parser = argparse.ArgumentParser(description=HELP_TEXT)
parser.add_argument(
'-p', '--port',
help="Port to serve the interface on.",
type=int,
default=5050
)
parser.add_argument(
'-a', '--host',
help="Host to server the interface on.",
)
args = parser.parse_args()
app.run(port=args.port, host=args.host, debug=False)
| detly/webcavate | webcavate/app.py | Python | gpl-3.0 | 3,132 |
import subprocess
import time
import sys
import re
class checkIfUp:
__shellPings = []
__shell2Nbst = []
__ipsToCheck = []
checkedIps = 0
onlineIps = 0
unreachable = 0
timedOut = 0
upIpsAddress = []
computerName = []
completeMacAddress = []
executionTime = 0
def __init__(self,fromIp,toIp):
startTime = time.time()
self.fromIp = fromIp # from 192.168.1.x
self.toIp = toIp # to 192.168.x.x
self.__checkIfIpIsValid(fromIp)
self.__checkIfIpIsValid(toIp)
self.__getRange(fromIp,toIp)
self.__shellToQueue()
#self.__checkIfUp() # run by the shellToQueue queue organizer
self.__computerInfoInQueue()
endTime = time.time()
self.executionTime = round(endTime - startTime,3)
def __checkIfIpIsValid(self,ip):
def validateRange(val):
            # valid range => 0 <-> 255
try:
val = int(val)
if val < 0 or val > 255:
print "Invalid IP Range ("+str(val)+")"
sys.exit(0)
except:
print "Invalid IP"
sys.exit(0)
ip = ip.split(".")
firstVal = validateRange(ip[0])
secondVal = validateRange(ip[1])
thirdVal = validateRange(ip[2])
fourthVal = validateRange(ip[3])
return True
def __getRange(self,fromIp,toIp):
fromIp = fromIp.split(".")
toIp = toIp.split(".")
# toIp must be > fromIp
def ip3chars(ipBlock):
# input 1; output 001
ipBlock = str(ipBlock)
while len(ipBlock) != 3:
ipBlock = "0"+ipBlock
return ipBlock
fromIpRaw = ip3chars(fromIp[0])+ip3chars(fromIp[1])+ip3chars(fromIp[2])+ip3chars(fromIp[3])
toIpRaw = ip3chars(toIp[0])+ip3chars(toIp[1])+ip3chars(toIp[2])+ip3chars(toIp[3])
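        # e.g. "192.168.1.50" becomes "192168001050", so comparing the padded
        # strings lexicographically matches numeric IP order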
if fromIpRaw > toIpRaw:
# if from is bigger switch the order
temp = fromIp
fromIp = toIp
toIp = temp
currentIp = [0,0,0,0]
# all to integers
currentIp0 = int(fromIp[0])
currentIp1 = int(fromIp[1])
currentIp2 = int(fromIp[2])
currentIp3 = int(fromIp[3])
toIp0 = int(toIp[0])
toIp1 = int(toIp[1])
toIp2 = int(toIp[2])
toIp3 = int(toIp[3])
firstIp = str(currentIp0)+"."+str(currentIp1)+"."+str(currentIp2)+"."+str(currentIp3)
self.__ipsToCheck = [firstIp]
while currentIp3 != toIp3 or currentIp2 != toIp2 or currentIp1 != toIp1 or currentIp0 != toIp0:
currentIp3 += 1
if currentIp3 > 255:
currentIp3 = 0
currentIp2 += 1
if currentIp2 > 255:
currentIp2 = 0
currentIp1 += 1
if currentIp1 > 255:
currentIp1 = 0
currentIp0 += 1
addIp = str(currentIp0)+"."+str(currentIp1)+"."+str(currentIp2)+"."+str(currentIp3)
self.__ipsToCheck.append(addIp)
def __shellToQueue(self):
# write them in the shell queue
maxPingsAtOnce = 200
currentQueuedPings = 0
for pingIp in self.__ipsToCheck:
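            # Windows ping syntax: '-n 1' sends a single echo request per host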
proc = subprocess.Popen(['ping','-n','1',pingIp],stdout=subprocess.PIPE,shell=True)
self.__shellPings.append(proc)
currentQueuedPings += 1
if currentQueuedPings >= maxPingsAtOnce:
#execute shells
self.__checkIfUp()
currentQueuedPings = 0
self.__shellPings = []
self.__checkIfUp() # execute last queue
def __checkIfUp(self):
# execute the shells & determine whether the host is up or not
for shellInQueue in self.__shellPings:
pingResult = ""
shellInQueue.wait()
while True:
line = shellInQueue.stdout.readline()
if line != "":
pingResult += line
else:
                    break
self.checkedIps += 1
if 'unreachable' in pingResult:
self.unreachable += 1
elif 'timed out' in pingResult:
self.timedOut += 1
else:
self.onlineIps += 1
currentIp = self.__ipsToCheck[self.checkedIps-1]
self.upIpsAddress.append(currentIp)
def __computerInfoInQueue(self):
# shell queue for online hosts
maxShellsAtOnce = 255
currentQueuedNbst = 0
for onlineIp in self.upIpsAddress:
proc = subprocess.Popen(['\\Windows\\sysnative\\nbtstat.exe','-a',onlineIp],stdout=subprocess.PIPE,shell=True)
self.__shell2Nbst.append(proc)
currentQueuedNbst += 1
if currentQueuedNbst >= maxShellsAtOnce:
# execute shells
self.__gatherComputerInfo()
currentQueuedNbst = 0
self.__shell2Nbst = []
self.__gatherComputerInfo() # execute last queue
def __gatherComputerInfo(self):
# execute the shells and find host Name and MAC
for shellInQueue in self.__shell2Nbst:
nbstResult = ""
shellInQueue.wait()
computerNameLine = ""
macAddressLine = ""
computerName = ""
macAddress = ""
while True:
line = shellInQueue.stdout.readline()
if line != "":
if '<00>' in line and 'UNIQUE' in line:
computerNameLine = line
if 'MAC Address' in line:
macAddressLine = line
else:
                    break
computerName = re.findall('([ ]+)(.*?)([ ]+)<00>', computerNameLine)
macAddress = re.findall('([A-Z0-9]+)-([A-Z0-9]+)-([A-Z0-9]+)-([A-Z0-9]+)-([A-Z0-9]+)-([A-Z0-9]+)',macAddressLine)
try:
self.computerName.append(computerName[0][1])
except:
self.computerName.append("")
completeMacAddress = ""
firstMacElement = 0
try:
for macEach in macAddress[0]:
if firstMacElement == 0:
firstMacElement += 1
else:
completeMacAddress += ":"
completeMacAddress += macEach
firstMacElement = 0
except:
completeMacAddress = ""
self.completeMacAddress.append(completeMacAddress)
def readValue(self):
# debugging use only
ips = []
for ip in self.completeMacAddress:
ips.append(ip)
return ips
print "\t\t---LANScanner v1.0---\n"
# brief tutorial
print "Sample input data:"
print "FromIP: 192.168.1.50"
print "ToIP: 192.168.1.20"
print "---"
# input
fromIp = raw_input("From: ")
toIp = raw_input("To: ")
# enter values to class
userRange = checkIfUp(fromIp,toIp)
# read class values
print ""
#print userRange.readValue() # debugging use only
print "Checked",userRange.checkedIps,"IPs"
print ""
print "Online:",str(userRange.onlineIps)+"/"+str(userRange.checkedIps)
print "Unreachable:",userRange.unreachable,"Timed out:",userRange.timedOut
print "" # newline
print "Online IPs:"
print "IP\t\tNAME\t\tMAC"
counter = 0
for onlineIp in userRange.upIpsAddress:
print onlineIp+"\t"+userRange.computerName[counter]+"\t"+userRange.completeMacAddress[counter]
counter += 1
print ""
print "Took",userRange.executionTime,"seconds" | mixedup4x4/Speedy | Contents/LanScan.py | Python | gpl-3.0 | 7,956 |
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
import re
from PyQt5.QtCore import Qt, pyqtSlot
from PyQt5.QtWidgets import (
QPushButton,
QLineEdit,
QVBoxLayout,
QGridLayout,
QDialog,
QTableView,
QAbstractItemView,
QSpacerItem,
QSizePolicy,
QHeaderView,
)
from .exclude_list_table import ExcludeListTable
from core.exclude import AlreadyThereException
from hscommon.trans import trget
tr = trget("ui")
class ExcludeListDialog(QDialog):
def __init__(self, app, parent, model, **kwargs):
flags = Qt.CustomizeWindowHint | Qt.WindowTitleHint | Qt.WindowSystemMenuHint
super().__init__(parent, flags, **kwargs)
self.app = app
self.specific_actions = frozenset()
self._setupUI()
self.model = model # ExcludeListDialogCore
self.model.view = self
self.table = ExcludeListTable(app, view=self.tableView) # Qt ExcludeListTable
self._row_matched = False # test if at least one row matched our test string
self._input_styled = False
self.buttonAdd.clicked.connect(self.addStringFromLineEdit)
self.buttonRemove.clicked.connect(self.removeSelected)
self.buttonRestore.clicked.connect(self.restoreDefaults)
self.buttonClose.clicked.connect(self.accept)
self.buttonHelp.clicked.connect(self.display_help_message)
self.buttonTestString.clicked.connect(self.onTestStringButtonClicked)
self.inputLine.textEdited.connect(self.reset_input_style)
self.testLine.textEdited.connect(self.reset_input_style)
self.testLine.textEdited.connect(self.reset_table_style)
def _setupUI(self):
layout = QVBoxLayout(self)
gridlayout = QGridLayout()
self.buttonAdd = QPushButton(tr("Add"))
self.buttonRemove = QPushButton(tr("Remove Selected"))
self.buttonRestore = QPushButton(tr("Restore defaults"))
self.buttonTestString = QPushButton(tr("Test string"))
self.buttonClose = QPushButton(tr("Close"))
self.buttonHelp = QPushButton(tr("Help"))
self.inputLine = QLineEdit()
self.testLine = QLineEdit()
self.tableView = QTableView()
triggers = (
QAbstractItemView.DoubleClicked | QAbstractItemView.EditKeyPressed | QAbstractItemView.SelectedClicked
)
self.tableView.setEditTriggers(triggers)
self.tableView.setSelectionMode(QTableView.ExtendedSelection)
self.tableView.setSelectionBehavior(QTableView.SelectRows)
self.tableView.setShowGrid(False)
vheader = self.tableView.verticalHeader()
vheader.setSectionsMovable(True)
vheader.setVisible(False)
hheader = self.tableView.horizontalHeader()
hheader.setSectionsMovable(False)
hheader.setSectionResizeMode(QHeaderView.Fixed)
hheader.setStretchLastSection(True)
hheader.setHighlightSections(False)
hheader.setVisible(True)
gridlayout.addWidget(self.inputLine, 0, 0)
gridlayout.addWidget(self.buttonAdd, 0, 1, Qt.AlignLeft)
gridlayout.addWidget(self.buttonRemove, 1, 1, Qt.AlignLeft)
gridlayout.addWidget(self.buttonRestore, 2, 1, Qt.AlignLeft)
gridlayout.addWidget(self.buttonHelp, 3, 1, Qt.AlignLeft)
gridlayout.addWidget(self.buttonClose, 4, 1)
gridlayout.addWidget(self.tableView, 1, 0, 6, 1)
gridlayout.addItem(QSpacerItem(0, 0, QSizePolicy.Minimum, QSizePolicy.Expanding), 4, 1)
gridlayout.addWidget(self.buttonTestString, 6, 1)
gridlayout.addWidget(self.testLine, 6, 0)
layout.addLayout(gridlayout)
self.inputLine.setPlaceholderText(tr("Type a python regular expression here..."))
self.inputLine.setFocus()
self.testLine.setPlaceholderText(tr("Type a file system path or filename here..."))
self.testLine.setClearButtonEnabled(True)
# --- model --> view
def show(self):
super().show()
self.inputLine.setFocus()
@pyqtSlot()
def addStringFromLineEdit(self):
text = self.inputLine.text()
if not text:
return
try:
self.model.add(text)
except AlreadyThereException:
self.app.show_message("Expression already in the list.")
return
except Exception as e:
self.app.show_message(f"Expression is invalid: {e}")
return
self.inputLine.clear()
def removeSelected(self):
self.model.remove_selected()
def restoreDefaults(self):
self.model.restore_defaults()
def onTestStringButtonClicked(self):
input_text = self.testLine.text()
if not input_text:
self.reset_input_style()
return
# If at least one row matched, we know whether table is highlighted or not
self._row_matched = self.model.test_string(input_text)
self.table.refresh()
# Test the string currently in the input text box as well
input_regex = self.inputLine.text()
if not input_regex:
self.reset_input_style()
return
compiled = None
try:
compiled = re.compile(input_regex)
except re.error:
self.reset_input_style()
return
if self.model.is_match(input_text, compiled):
self.inputLine.setStyleSheet("background-color: rgb(10, 200, 10);")
self._input_styled = True
else:
self.reset_input_style()
def reset_input_style(self):
"""Reset regex input line background"""
if self._input_styled:
self.inputLine.setStyleSheet(self.styleSheet())
self._input_styled = False
def reset_table_style(self):
if self._row_matched:
self._row_matched = False
self.model.reset_rows_highlight()
self.table.refresh()
def display_help_message(self):
self.app.show_message(
tr(
"""\
These (case sensitive) python regular expressions will filter out files during scans.<br>\
Directories will also have their <strong>default state</strong> set to Excluded \
in the Directories tab if their name happens to match one of the selected regular expressions.<br>\
For each file collected, two tests are performed to determine whether or not to completely ignore it:<br>\
<li>1. Regular expressions with no path separator in them will be compared to the file name only.</li>
<li>2. Regular expressions with at least one path separator in them will be compared to the full path to the file.</li><br>
Example: if you want to filter out .PNG files from the "My Pictures" directory only:<br>\
<code>.*My\\sPictures\\\\.*\\.png</code><br><br>\
You can test the regular expression with the "test string" button after pasting a fake path in the test field:<br>\
<code>C:\\\\User\\My Pictures\\test.png</code><br><br>
Matching regular expressions will be highlighted.<br>\
If there is at least one highlight, the path or filename tested will be ignored during scans.<br><br>\
Directories and files starting with a period '.' are filtered out by default.<br><br>"""
)
)
| arsenetar/dupeguru | qt/exclude_list_dialog.py | Python | gpl-3.0 | 7,359 |
from bottle import route, template, error, request, static_file, get, post
from index import get_index
from bmarks import get_bmarks
from tags import get_tags
from add import add_tags
from bmarklet import get_bmarklet
from account import get_account
from edit_tags import get_edit_tags
from importbm import get_import_bm
from edit import do_edit
from login import do_login
from register import do_register
@route('/')
def myroot():
return_data = get_index()
return return_data
@route('/account', method=['GET', 'POST'])
def account():
    return_data = get_account()
return return_data
@route('/add', method=['GET', 'POST'])
def add():
return_data = add_tags()
return return_data
@route('/bmarklet')
def bmarklet():
return_data = get_bmarklet()
return return_data
@route('/bmarks')
def bmarks():
return_data = get_bmarks()
return return_data
@route('/edit', method=['GET', 'POST'])
def edit():
return_data = do_edit()
return return_data
@route('/edit_tags', method=['GET', 'POST'])
def edit_tags():
return_data = get_edit_tags()
return return_data
@route('/import', method=['GET', 'POST'])
def import_bm():
return_data = get_import_bm()
return return_data
@route('/login', method=['GET', 'POST'])
def login():
return_data = do_login()
return return_data
@route('/register', method=['GET', 'POST'])
def register():
return_data = do_register()
return return_data
@route('/tags')
def tags():
return_data = get_tags()
return return_data
# serve css
@get('/<filename:re:.*\.css>')
def send_css(filename):
return static_file(filename, root='css')
# serve javascript
@get('/<filename:re:.*\.js>')
def send_js(filename):
return static_file(filename, root='js')
# serve images
@get('/<filename:re:.*\.png>')
def send_img(filename):
return static_file(filename, root='images')
# serve fonts
@get('/<filename:re:.*\.(woff|woff2)>')
def send_font(filename):
return static_file(filename, root='fonts')
@error(404)
def handle404(error):
return '<H1>Ooops, its not here<BR>'
@error(500)
def handle500(error):
return '<H1>Oops, its broken: {}<BR>'.format(error)
| netllama/tastipy | tastiapp.py | Python | gpl-3.0 | 2,172 |
#!/usr/bin/env python
class Message(object):
"""
Base type of a message sent through the pipeline.
Define some attributes and methods to form your message.
I suggest you don't alter this class. You're are free to do so, of course. It's your own decision.
Though, I suggest you create your own message type and let it inherit from this class.
"""
pass
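

# A minimal illustrative sketch (not part of the original package): a custom
# message type formed by inheriting from Message, as the docstring suggests.
# The class and attribute names here are assumptions.
class ExampleTextMessage(Message):
    """Hypothetical message type carrying a plain-text payload."""
    def __init__(self, text):
        self.text = text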
| lumannnn/pypifi | pypifi/message.py | Python | gpl-3.0 | 397 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import itertools
import json
import erpnext
import frappe
import copy
from erpnext.controllers.item_variant import (ItemVariantExistsError,
copy_attributes_to_variant, get_variant, make_variant_item_code, validate_item_variant_attributes)
from erpnext.setup.doctype.item_group.item_group import (get_parent_item_groups, invalidate_cache_for)
from frappe import _, msgprint
from frappe.utils import (cint, cstr, flt, formatdate, get_timestamp, getdate,
now_datetime, random_string, strip)
from frappe.utils.html_utils import clean_html
from frappe.website.doctype.website_slideshow.website_slideshow import \
get_slideshow
from frappe.website.render import clear_cache
from frappe.website.website_generator import WebsiteGenerator
from six import iteritems
class DuplicateReorderRows(frappe.ValidationError):
pass
class StockExistsForTemplate(frappe.ValidationError):
pass
class InvalidBarcode(frappe.ValidationError):
pass
class Item(WebsiteGenerator):
website = frappe._dict(
page_title_field="item_name",
condition_field="show_in_website",
template="templates/generators/item.html",
no_cache=1
)
def onload(self):
super(Item, self).onload()
self.set_onload('stock_exists', self.stock_ledger_created())
self.set_asset_naming_series()
def set_asset_naming_series(self):
if not hasattr(self, '_asset_naming_series'):
from erpnext.assets.doctype.asset.asset import get_asset_naming_series
self._asset_naming_series = get_asset_naming_series()
self.set_onload('asset_naming_series', self._asset_naming_series)
def autoname(self):
if frappe.db.get_default("item_naming_by") == "Naming Series":
if self.variant_of:
if not self.item_code:
template_item_name = frappe.db.get_value("Item", self.variant_of, "item_name")
self.item_code = make_variant_item_code(self.variant_of, template_item_name, self)
else:
from frappe.model.naming import set_name_by_naming_series
set_name_by_naming_series(self)
self.item_code = self.name
self.item_code = strip(self.item_code)
self.name = self.item_code
def before_insert(self):
if not self.description:
self.description = self.item_name
# if self.is_sales_item and not self.get('is_item_from_hub'):
# self.publish_in_hub = 1
def after_insert(self):
'''set opening stock and item price'''
if self.standard_rate:
for default in self.item_defaults:
self.add_price(default.default_price_list)
if self.opening_stock:
self.set_opening_stock()
def validate(self):
self.get_doc_before_save()
super(Item, self).validate()
if not self.item_name:
self.item_name = self.item_code
if not self.description:
self.description = self.item_name
self.validate_uom()
self.validate_description()
self.add_default_uom_in_conversion_factor_table()
self.validate_conversion_factor()
self.validate_item_type()
self.check_for_active_boms()
self.fill_customer_code()
self.check_item_tax()
self.validate_barcode()
self.validate_warehouse_for_reorder()
self.update_bom_item_desc()
self.synced_with_hub = 0
self.validate_has_variants()
self.validate_stock_exists_for_template_item()
self.validate_attributes()
self.validate_variant_attributes()
self.validate_variant_based_on_change()
self.validate_website_image()
self.make_thumbnail()
self.validate_fixed_asset()
self.validate_retain_sample()
self.validate_uom_conversion_factor()
self.validate_item_defaults()
self.update_defaults_from_item_group()
self.validate_stock_for_has_batch_and_has_serial()
if not self.get("__islocal"):
self.old_item_group = frappe.db.get_value(self.doctype, self.name, "item_group")
self.old_website_item_groups = frappe.db.sql_list("""select item_group
from `tabWebsite Item Group`
where parentfield='website_item_groups' and parenttype='Item' and parent=%s""", self.name)
def on_update(self):
invalidate_cache_for_item(self)
self.validate_name_with_item_group()
self.update_variants()
self.update_item_price()
self.update_template_item()
def validate_description(self):
'''Clean HTML description if set'''
if cint(frappe.db.get_single_value('Stock Settings', 'clean_description_html')):
self.description = clean_html(self.description)
def add_price(self, price_list=None):
'''Add a new price'''
if not price_list:
price_list = (frappe.db.get_single_value('Selling Settings', 'selling_price_list')
or frappe.db.get_value('Price List', _('Standard Selling')))
if price_list:
item_price = frappe.get_doc({
"doctype": "Item Price",
"price_list": price_list,
"item_code": self.name,
"currency": erpnext.get_default_currency(),
"price_list_rate": self.standard_rate
})
item_price.insert()
def set_opening_stock(self):
'''set opening stock'''
if not self.is_stock_item or self.has_serial_no or self.has_batch_no:
return
if not self.valuation_rate and self.standard_rate:
self.valuation_rate = self.standard_rate
if not self.valuation_rate:
frappe.throw(_("Valuation Rate is mandatory if Opening Stock entered"))
from erpnext.stock.doctype.stock_entry.stock_entry_utils import make_stock_entry
# default warehouse, or Stores
for default in self.item_defaults:
default_warehouse = (default.default_warehouse
or frappe.db.get_single_value('Stock Settings', 'default_warehouse')
or frappe.db.get_value('Warehouse', {'warehouse_name': _('Stores')}))
if default_warehouse:
stock_entry = make_stock_entry(item_code=self.name, target=default_warehouse, qty=self.opening_stock,
rate=self.valuation_rate, company=default.company)
stock_entry.add_comment("Comment", _("Opening Stock"))
def make_route(self):
if not self.route:
return cstr(frappe.db.get_value('Item Group', self.item_group,
'route')) + '/' + self.scrub((self.item_name if self.item_name else self.item_code) + '-' + random_string(5))
	def validate_website_image(self):
		"""Validate that the website image is a public file"""
		if frappe.flags.in_import:
			return
auto_set_website_image = False
if not self.website_image and self.image:
auto_set_website_image = True
self.website_image = self.image
if not self.website_image:
return
# find if website image url exists as public
file_doc = frappe.get_all("File", filters={
"file_url": self.website_image
}, fields=["name", "is_private"], order_by="is_private asc", limit_page_length=1)
if file_doc:
file_doc = file_doc[0]
if not file_doc:
if not auto_set_website_image:
frappe.msgprint(_("Website Image {0} attached to Item {1} cannot be found").format(self.website_image, self.name))
self.website_image = None
elif file_doc.is_private:
if not auto_set_website_image:
frappe.msgprint(_("Website Image should be a public file or website URL"))
self.website_image = None
	def make_thumbnail(self):
		"""Make a thumbnail of `website_image`"""
		if frappe.flags.in_import:
			return
import requests.exceptions
if not self.is_new() and self.website_image != frappe.db.get_value(self.doctype, self.name, "website_image"):
self.thumbnail = None
if self.website_image and not self.thumbnail:
file_doc = None
try:
file_doc = frappe.get_doc("File", {
"file_url": self.website_image,
"attached_to_doctype": "Item",
"attached_to_name": self.name
})
except frappe.DoesNotExistError:
pass
# cleanup
frappe.local.message_log.pop()
except requests.exceptions.HTTPError:
frappe.msgprint(_("Warning: Invalid attachment {0}").format(self.website_image))
self.website_image = None
except requests.exceptions.SSLError:
frappe.msgprint(
_("Warning: Invalid SSL certificate on attachment {0}").format(self.website_image))
self.website_image = None
# for CSV import
if self.website_image and not file_doc:
try:
file_doc = frappe.get_doc({
"doctype": "File",
"file_url": self.website_image,
"attached_to_doctype": "Item",
"attached_to_name": self.name
}).insert()
except IOError:
self.website_image = None
if file_doc:
if not file_doc.thumbnail_url:
file_doc.make_thumbnail()
self.thumbnail = file_doc.thumbnail_url
def validate_fixed_asset(self):
if self.is_fixed_asset:
if self.is_stock_item:
frappe.throw(_("Fixed Asset Item must be a non-stock item."))
if not self.asset_category:
frappe.throw(_("Asset Category is mandatory for Fixed Asset item"))
if self.stock_ledger_created():
frappe.throw(_("Cannot be a fixed asset item as Stock Ledger is created."))
if not self.is_fixed_asset:
asset = frappe.db.get_all("Asset", filters={"item_code": self.name, "docstatus": 1}, limit=1)
if asset:
frappe.throw(_('"Is Fixed Asset" cannot be unchecked, as Asset record exists against the item'))
def validate_retain_sample(self):
if self.retain_sample and not frappe.db.get_single_value('Stock Settings', 'sample_retention_warehouse'):
frappe.throw(_("Please select Sample Retention Warehouse in Stock Settings first"))
if self.retain_sample and not self.has_batch_no:
frappe.throw(_(" {0} Retain Sample is based on batch, please check Has Batch No to retain sample of item").format(
self.item_code))
def get_context(self, context):
context.show_search = True
context.search_link = '/product_search'
context.parents = get_parent_item_groups(self.item_group)
self.set_variant_context(context)
self.set_attribute_context(context)
self.set_disabled_attributes(context)
return context
def set_variant_context(self, context):
if self.has_variants:
context.no_cache = True
# load variants
# also used in set_attribute_context
context.variants = frappe.get_all("Item",
filters={"variant_of": self.name, "show_variant_in_website": 1},
order_by="name asc")
variant = frappe.form_dict.variant
if not variant and context.variants:
# the case when the item is opened for the first time from its list
variant = context.variants[0]
if variant:
context.variant = frappe.get_doc("Item", variant)
for fieldname in ("website_image", "web_long_description", "description",
"website_specifications"):
if context.variant.get(fieldname):
value = context.variant.get(fieldname)
if isinstance(value, list):
value = [d.as_dict() for d in value]
context[fieldname] = value
if self.slideshow:
if context.variant and context.variant.slideshow:
context.update(get_slideshow(context.variant))
else:
context.update(get_slideshow(self))
def set_attribute_context(self, context):
if self.has_variants:
attribute_values_available = {}
context.attribute_values = {}
context.selected_attributes = {}
# load attributes
for v in context.variants:
v.attributes = frappe.get_all("Item Variant Attribute",
fields=["attribute", "attribute_value"],
filters={"parent": v.name})
for attr in v.attributes:
values = attribute_values_available.setdefault(attr.attribute, [])
if attr.attribute_value not in values:
values.append(attr.attribute_value)
if v.name == context.variant.name:
context.selected_attributes[attr.attribute] = attr.attribute_value
# filter attributes, order based on attribute table
for attr in self.attributes:
values = context.attribute_values.setdefault(attr.attribute, [])
if cint(frappe.db.get_value("Item Attribute", attr.attribute, "numeric_values")):
for val in sorted(attribute_values_available.get(attr.attribute, []), key=flt):
values.append(val)
else:
# get list of values defined (for sequence)
for attr_value in frappe.db.get_all("Item Attribute Value",
fields=["attribute_value"],
filters={"parent": attr.attribute}, order_by="idx asc"):
if attr_value.attribute_value in attribute_values_available.get(attr.attribute, []):
values.append(attr_value.attribute_value)
context.variant_info = json.dumps(context.variants)
def set_disabled_attributes(self, context):
"""Disable selection options of attribute combinations that do not result in a variant"""
if not self.attributes or not self.has_variants:
return
context.disabled_attributes = {}
attributes = [attr.attribute for attr in self.attributes]
def find_variant(combination):
for variant in context.variants:
if len(variant.attributes) < len(attributes):
continue
if "combination" not in variant:
ref_combination = []
for attr in variant.attributes:
idx = attributes.index(attr.attribute)
ref_combination.insert(idx, attr.attribute_value)
variant["combination"] = ref_combination
if not (set(combination) - set(variant["combination"])):
# check if the combination is a subset of a variant combination
# eg. [Blue, 0.5] is a possible combination if exists [Blue, Large, 0.5]
return True
for i, attr in enumerate(self.attributes):
if i == 0:
continue
combination_source = []
# loop through previous attributes
for prev_attr in self.attributes[:i]:
combination_source.append([context.selected_attributes.get(prev_attr.attribute)])
combination_source.append(context.attribute_values[attr.attribute])
for combination in itertools.product(*combination_source):
if not find_variant(combination):
context.disabled_attributes.setdefault(attr.attribute, []).append(combination[-1])
def add_default_uom_in_conversion_factor_table(self):
uom_conv_list = [d.uom for d in self.get("uoms")]
if self.stock_uom not in uom_conv_list:
ch = self.append('uoms', {})
ch.uom = self.stock_uom
ch.conversion_factor = 1
to_remove = []
for d in self.get("uoms"):
if d.conversion_factor == 1 and d.uom != self.stock_uom:
to_remove.append(d)
		for d in to_remove:
			self.remove(d)
def update_template_tables(self):
template = frappe.get_doc("Item", self.variant_of)
# add item taxes from template
for d in template.get("taxes"):
self.append("taxes", {"tax_type": d.tax_type, "tax_rate": d.tax_rate})
# copy re-order table if empty
if not self.get("reorder_levels"):
for d in template.get("reorder_levels"):
n = {}
for k in ("warehouse", "warehouse_reorder_level",
"warehouse_reorder_qty", "material_request_type"):
n[k] = d.get(k)
self.append("reorder_levels", n)
def validate_conversion_factor(self):
check_list = []
for d in self.get('uoms'):
if cstr(d.uom) in check_list:
frappe.throw(
_("Unit of Measure {0} has been entered more than once in Conversion Factor Table").format(d.uom))
else:
check_list.append(cstr(d.uom))
if d.uom and cstr(d.uom) == cstr(self.stock_uom) and flt(d.conversion_factor) != 1:
frappe.throw(
_("Conversion factor for default Unit of Measure must be 1 in row {0}").format(d.idx))
def validate_item_type(self):
if self.has_serial_no == 1 and self.is_stock_item == 0 and not self.is_fixed_asset:
msgprint(_("'Has Serial No' can not be 'Yes' for non-stock item"), raise_exception=1)
if self.has_serial_no == 0 and self.serial_no_series:
self.serial_no_series = None
def check_for_active_boms(self):
if self.default_bom:
bom_item = frappe.db.get_value("BOM", self.default_bom, "item")
if bom_item not in (self.name, self.variant_of):
frappe.throw(
_("Default BOM ({0}) must be active for this item or its template").format(bom_item))
def fill_customer_code(self):
""" Append all the customer codes and insert into "customer_code" field of item table """
cust_code = []
for d in self.get('customer_items'):
cust_code.append(d.ref_code)
self.customer_code = ','.join(cust_code)
def check_item_tax(self):
"""Check whether Tax Rate is not entered twice for same Tax Type"""
check_list = []
for d in self.get('taxes'):
if d.tax_type:
account_type = frappe.db.get_value("Account", d.tax_type, "account_type")
if account_type not in ['Tax', 'Chargeable', 'Income Account', 'Expense Account']:
frappe.throw(
_("Item Tax Row {0} must have account of type Tax or Income or Expense or Chargeable").format(d.idx))
else:
if d.tax_type in check_list:
frappe.throw(_("{0} entered twice in Item Tax").format(d.tax_type))
else:
check_list.append(d.tax_type)
def validate_barcode(self):
from stdnum import ean
if len(self.barcodes) > 0:
for item_barcode in self.barcodes:
options = frappe.get_meta("Item Barcode").get_options("barcode_type").split('\n')
if item_barcode.barcode:
duplicate = frappe.db.sql(
"""select parent from `tabItem Barcode` where barcode = %s and parent != %s""", (item_barcode.barcode, self.name))
if duplicate:
frappe.throw(_("Barcode {0} already used in Item {1}").format(
item_barcode.barcode, duplicate[0][0]), frappe.DuplicateEntryError)
item_barcode.barcode_type = "" if item_barcode.barcode_type not in options else item_barcode.barcode_type
if item_barcode.barcode_type and item_barcode.barcode_type.upper() in ('EAN', 'UPC-A', 'EAN-13', 'EAN-8'):
if not ean.is_valid(item_barcode.barcode):
frappe.throw(_("Barcode {0} is not a valid {1} code").format(
item_barcode.barcode, item_barcode.barcode_type), InvalidBarcode)
def validate_warehouse_for_reorder(self):
'''Validate Reorder level table for duplicate and conditional mandatory'''
warehouse = []
for d in self.get("reorder_levels"):
if not d.warehouse_group:
d.warehouse_group = d.warehouse
if d.get("warehouse") and d.get("warehouse") not in warehouse:
warehouse += [d.get("warehouse")]
else:
frappe.throw(_("Row {0}: An Reorder entry already exists for this warehouse {1}")
.format(d.idx, d.warehouse), DuplicateReorderRows)
if d.warehouse_reorder_level and not d.warehouse_reorder_qty:
frappe.throw(_("Row #{0}: Please set reorder quantity").format(d.idx))
def stock_ledger_created(self):
if not hasattr(self, '_stock_ledger_created'):
self._stock_ledger_created = len(frappe.db.sql("""select name from `tabStock Ledger Entry`
where item_code = %s limit 1""", self.name))
return self._stock_ledger_created
def validate_name_with_item_group(self):
# causes problem with tree build
if frappe.db.exists("Item Group", self.name):
frappe.throw(
_("An Item Group exists with same name, please change the item name or rename the item group"))
def update_item_price(self):
frappe.db.sql("""update `tabItem Price` set item_name=%s,
item_description=%s, brand=%s where item_code=%s""",
(self.item_name, self.description, self.brand, self.name))
def on_trash(self):
super(Item, self).on_trash()
frappe.db.sql("""delete from tabBin where item_code=%s""", self.name)
frappe.db.sql("delete from `tabItem Price` where item_code=%s", self.name)
for variant_of in frappe.get_all("Item", filters={"variant_of": self.name}):
frappe.delete_doc("Item", variant_of.name)
def before_rename(self, old_name, new_name, merge=False):
if self.item_name == old_name:
frappe.db.set_value("Item", old_name, "item_name", new_name)
if merge:
# Validate properties before merging
if not frappe.db.exists("Item", new_name):
frappe.throw(_("Item {0} does not exist").format(new_name))
field_list = ["stock_uom", "is_stock_item", "has_serial_no", "has_batch_no"]
new_properties = [cstr(d) for d in frappe.db.get_value("Item", new_name, field_list)]
if new_properties != [cstr(self.get(fld)) for fld in field_list]:
frappe.throw(_("To merge, following properties must be same for both items")
+ ": \n" + ", ".join([self.meta.get_label(fld) for fld in field_list]))
def after_rename(self, old_name, new_name, merge):
if self.route:
invalidate_cache_for_item(self)
clear_cache(self.route)
frappe.db.set_value("Item", new_name, "item_code", new_name)
if merge:
self.set_last_purchase_rate(new_name)
self.recalculate_bin_qty(new_name)
for dt in ("Sales Taxes and Charges", "Purchase Taxes and Charges"):
for d in frappe.db.sql("""select name, item_wise_tax_detail from `tab{0}`
where ifnull(item_wise_tax_detail, '') != ''""".format(dt), as_dict=1):
item_wise_tax_detail = json.loads(d.item_wise_tax_detail)
if isinstance(item_wise_tax_detail, dict) and old_name in item_wise_tax_detail:
item_wise_tax_detail[new_name] = item_wise_tax_detail[old_name]
item_wise_tax_detail.pop(old_name)
frappe.db.set_value(dt, d.name, "item_wise_tax_detail",
json.dumps(item_wise_tax_detail), update_modified=False)
def set_last_purchase_rate(self, new_name):
last_purchase_rate = get_last_purchase_details(new_name).get("base_rate", 0)
frappe.db.set_value("Item", new_name, "last_purchase_rate", last_purchase_rate)
def recalculate_bin_qty(self, new_name):
from erpnext.stock.stock_balance import repost_stock
frappe.db.auto_commit_on_many_writes = 1
existing_allow_negative_stock = frappe.db.get_value("Stock Settings", None, "allow_negative_stock")
frappe.db.set_value("Stock Settings", None, "allow_negative_stock", 1)
repost_stock_for_warehouses = frappe.db.sql_list("""select distinct warehouse
from tabBin where item_code=%s""", new_name)
# Delete all existing bins to avoid duplicate bins for the same item and warehouse
frappe.db.sql("delete from `tabBin` where item_code=%s", new_name)
for warehouse in repost_stock_for_warehouses:
repost_stock(new_name, warehouse)
frappe.db.set_value("Stock Settings", None, "allow_negative_stock", existing_allow_negative_stock)
frappe.db.auto_commit_on_many_writes = 0
def copy_specification_from_item_group(self):
self.set("website_specifications", [])
if self.item_group:
for label, desc in frappe.db.get_values("Item Website Specification",
{"parent": self.item_group}, ["label", "description"]):
row = self.append("website_specifications")
row.label = label
row.description = desc
def update_bom_item_desc(self):
if self.is_new():
return
if self.db_get('description') != self.description:
frappe.db.sql("""
update `tabBOM`
set description = %s
where item = %s and docstatus < 2
""", (self.description, self.name))
frappe.db.sql("""
update `tabBOM Item`
set description = %s
where item_code = %s and docstatus < 2
""", (self.description, self.name))
frappe.db.sql("""
update `tabBOM Explosion Item`
set description = %s
where item_code = %s and docstatus < 2
""", (self.description, self.name))
def update_template_item(self):
"""Set Show in Website for Template Item if True for its Variant"""
if self.variant_of:
if self.show_in_website:
self.show_variant_in_website = 1
self.show_in_website = 0
if self.show_variant_in_website:
# show template
template_item = frappe.get_doc("Item", self.variant_of)
if not template_item.show_in_website:
template_item.show_in_website = 1
template_item.flags.dont_update_variants = True
template_item.flags.ignore_permissions = True
template_item.save()
def validate_item_defaults(self):
companies = list(set([row.company for row in self.item_defaults]))
if len(companies) != len(self.item_defaults):
frappe.throw(_("Cannot set multiple Item Defaults for a company."))
def update_defaults_from_item_group(self):
"""Get defaults from Item Group"""
if self.item_group and not self.item_defaults:
item_defaults = frappe.db.get_values("Item Default", {"parent": self.item_group},
['company', 'default_warehouse','default_price_list','buying_cost_center','default_supplier',
'expense_account','selling_cost_center','income_account'], as_dict = 1)
if item_defaults:
for item in item_defaults:
self.append('item_defaults', {
'company': item.company,
'default_warehouse': item.default_warehouse,
'default_price_list': item.default_price_list,
'buying_cost_center': item.buying_cost_center,
'default_supplier': item.default_supplier,
'expense_account': item.expense_account,
'selling_cost_center': item.selling_cost_center,
'income_account': item.income_account
})
else:
warehouse = ''
defaults = frappe.defaults.get_defaults() or {}
			# check that the default warehouse belongs to the default company
if defaults.get("default_warehouse") and frappe.db.exists("Warehouse",
{'name': defaults.default_warehouse, 'company': defaults.company}):
warehouse = defaults.default_warehouse
self.append("item_defaults", {
"company": defaults.get("company"),
"default_warehouse": warehouse
})
def update_variants(self):
if self.flags.dont_update_variants or \
frappe.db.get_single_value('Item Variant Settings', 'do_not_update_variants'):
return
if self.has_variants:
variants = frappe.db.get_all("Item", fields=["item_code"], filters={"variant_of": self.name})
if variants:
if len(variants) <= 30:
update_variants(variants, self, publish_progress=False)
frappe.msgprint(_("Item Variants updated"))
else:
frappe.enqueue("erpnext.stock.doctype.item.item.update_variants",
variants=variants, template=self, now=frappe.flags.in_test, timeout=600)
def validate_has_variants(self):
if not self.has_variants and frappe.db.get_value("Item", self.name, "has_variants"):
if frappe.db.exists("Item", {"variant_of": self.name}):
frappe.throw(_("Item has variants."))
def validate_stock_exists_for_template_item(self):
if self.stock_ledger_created() and self._doc_before_save:
if (cint(self._doc_before_save.has_variants) != cint(self.has_variants)
or self._doc_before_save.variant_of != self.variant_of):
frappe.throw(_("Cannot change Variant properties after stock transaction. You will have to make a new Item to do this.").format(self.name),
StockExistsForTemplate)
if self.has_variants or self.variant_of:
if not self.is_child_table_same('attributes'):
frappe.throw(
_('Cannot change Attributes after stock transaction. Make a new Item and transfer stock to the new Item'))
def validate_variant_based_on_change(self):
if not self.is_new() and (self.variant_of or (self.has_variants and frappe.get_all("Item", {"variant_of": self.name}))):
if self.variant_based_on != frappe.db.get_value("Item", self.name, "variant_based_on"):
frappe.throw(_("Variant Based On cannot be changed"))
def validate_uom(self):
if not self.get("__islocal"):
check_stock_uom_with_bin(self.name, self.stock_uom)
if self.has_variants:
for d in frappe.db.get_all("Item", filters={"variant_of": self.name}):
check_stock_uom_with_bin(d.name, self.stock_uom)
if self.variant_of:
template_uom = frappe.db.get_value("Item", self.variant_of, "stock_uom")
if template_uom != self.stock_uom:
frappe.throw(_("Default Unit of Measure for Variant '{0}' must be same as in Template '{1}'")
.format(self.stock_uom, template_uom))
def validate_uom_conversion_factor(self):
if self.uoms:
for d in self.uoms:
value = get_uom_conv_factor(d.uom, self.stock_uom)
if value:
d.conversion_factor = value
def validate_attributes(self):
if not (self.has_variants or self.variant_of):
return
if not self.variant_based_on:
self.variant_based_on = 'Item Attribute'
if self.variant_based_on == 'Item Attribute':
attributes = []
if not self.attributes:
frappe.throw(_("Attribute table is mandatory"))
for d in self.attributes:
if d.attribute in attributes:
					frappe.throw(
						_("Attribute {0} selected multiple times in Attributes Table").format(d.attribute))
else:
attributes.append(d.attribute)
def validate_variant_attributes(self):
if self.is_new() and self.variant_of and self.variant_based_on == 'Item Attribute':
args = {}
for d in self.attributes:
if cstr(d.attribute_value).strip() == '':
frappe.throw(_("Please specify Attribute Value for attribute {0}").format(d.attribute))
args[d.attribute] = d.attribute_value
variant = get_variant(self.variant_of, args, self.name)
if variant:
frappe.throw(_("Item variant {0} exists with same attributes")
.format(variant), ItemVariantExistsError)
validate_item_variant_attributes(self, args)
def validate_stock_for_has_batch_and_has_serial(self):
if self.stock_ledger_created():
for value in ["has_batch_no", "has_serial_no"]:
if frappe.db.get_value("Item", self.name, value) != self.get_value(value):
frappe.throw(_("Cannot change {0} as Stock Transaction for Item {1} exist.".format(value, self.name)))
def get_timeline_data(doctype, name):
'''returns timeline data based on stock ledger entry'''
out = {}
items = dict(frappe.db.sql('''select posting_date, count(*)
from `tabStock Ledger Entry` where item_code=%s
and posting_date > date_sub(curdate(), interval 1 year)
group by posting_date''', name))
for date, count in iteritems(items):
timestamp = get_timestamp(date)
out.update({timestamp: count})
return out
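# Sketch of the returned mapping (numbers are illustrative):
#     {1546300800: 3, 1546387200: 1}
# i.e. the unix timestamp of each posting_date over the last year mapped to the
# number of Stock Ledger Entries created that day.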
def validate_end_of_life(item_code, end_of_life=None, disabled=None, verbose=1):
if (not end_of_life) or (disabled is None):
end_of_life, disabled = frappe.db.get_value("Item", item_code, ["end_of_life", "disabled"])
if end_of_life and end_of_life != "0000-00-00" and getdate(end_of_life) <= now_datetime().date():
msg = _("Item {0} has reached its end of life on {1}").format(item_code, formatdate(end_of_life))
_msgprint(msg, verbose)
if disabled:
_msgprint(_("Item {0} is disabled").format(item_code), verbose)
def validate_is_stock_item(item_code, is_stock_item=None, verbose=1):
if not is_stock_item:
is_stock_item = frappe.db.get_value("Item", item_code, "is_stock_item")
if is_stock_item != 1:
msg = _("Item {0} is not a stock Item").format(item_code)
_msgprint(msg, verbose)
def validate_cancelled_item(item_code, docstatus=None, verbose=1):
if docstatus is None:
docstatus = frappe.db.get_value("Item", item_code, "docstatus")
if docstatus == 2:
msg = _("Item {0} is cancelled").format(item_code)
_msgprint(msg, verbose)
def _msgprint(msg, verbose):
if verbose:
msgprint(msg, raise_exception=True)
else:
raise frappe.ValidationError(msg)
def get_last_purchase_details(item_code, doc_name=None, conversion_rate=1.0):
"""returns last purchase details in stock uom"""
# get last purchase order item details
last_purchase_order = frappe.db.sql("""\
select po.name, po.transaction_date, po.conversion_rate,
po_item.conversion_factor, po_item.base_price_list_rate,
po_item.discount_percentage, po_item.base_rate
from `tabPurchase Order` po, `tabPurchase Order Item` po_item
where po.docstatus = 1 and po_item.item_code = %s and po.name != %s and
po.name = po_item.parent
order by po.transaction_date desc, po.name desc
limit 1""", (item_code, cstr(doc_name)), as_dict=1)
# get last purchase receipt item details
last_purchase_receipt = frappe.db.sql("""\
select pr.name, pr.posting_date, pr.posting_time, pr.conversion_rate,
pr_item.conversion_factor, pr_item.base_price_list_rate, pr_item.discount_percentage,
pr_item.base_rate
from `tabPurchase Receipt` pr, `tabPurchase Receipt Item` pr_item
where pr.docstatus = 1 and pr_item.item_code = %s and pr.name != %s and
pr.name = pr_item.parent
order by pr.posting_date desc, pr.posting_time desc, pr.name desc
limit 1""", (item_code, cstr(doc_name)), as_dict=1)
purchase_order_date = getdate(last_purchase_order and last_purchase_order[0].transaction_date
or "1900-01-01")
purchase_receipt_date = getdate(last_purchase_receipt and
last_purchase_receipt[0].posting_date or "1900-01-01")
if (purchase_order_date > purchase_receipt_date) or \
(last_purchase_order and not last_purchase_receipt):
# use purchase order
last_purchase = last_purchase_order[0]
purchase_date = purchase_order_date
elif (purchase_receipt_date > purchase_order_date) or \
(last_purchase_receipt and not last_purchase_order):
# use purchase receipt
last_purchase = last_purchase_receipt[0]
purchase_date = purchase_receipt_date
else:
return frappe._dict()
conversion_factor = flt(last_purchase.conversion_factor)
out = frappe._dict({
"base_price_list_rate": flt(last_purchase.base_price_list_rate) / conversion_factor,
"base_rate": flt(last_purchase.base_rate) / conversion_factor,
"discount_percentage": flt(last_purchase.discount_percentage),
"purchase_date": purchase_date
})
conversion_rate = flt(conversion_rate) or 1.0
out.update({
"price_list_rate": out.base_price_list_rate / conversion_rate,
"rate": out.base_rate / conversion_rate,
"base_rate": out.base_rate
})
return out
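# Illustrative call (item code and figures are assumed):
#     get_last_purchase_details("ITEM-0001", conversion_rate=0.8)
# returns the base_* values per unit of stock UOM plus "rate"/"price_list_rate"
# divided by the given conversion_rate, or an empty frappe._dict() when the
# item has never been purchased.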
def invalidate_cache_for_item(doc):
invalidate_cache_for(doc, doc.item_group)
website_item_groups = list(set((doc.get("old_website_item_groups") or [])
+ [d.item_group for d in doc.get({"doctype": "Website Item Group"}) if d.item_group]))
for item_group in website_item_groups:
invalidate_cache_for(doc, item_group)
if doc.get("old_item_group") and doc.get("old_item_group") != doc.item_group:
invalidate_cache_for(doc, doc.old_item_group)
def check_stock_uom_with_bin(item, stock_uom):
if stock_uom == frappe.db.get_value("Item", item, "stock_uom"):
return
matched = True
ref_uom = frappe.db.get_value("Stock Ledger Entry",
{"item_code": item}, "stock_uom")
if ref_uom:
if cstr(ref_uom) != cstr(stock_uom):
matched = False
else:
bin_list = frappe.db.sql("select * from tabBin where item_code=%s", item, as_dict=1)
for bin in bin_list:
if (bin.reserved_qty > 0 or bin.ordered_qty > 0 or bin.indented_qty > 0
or bin.planned_qty > 0) and cstr(bin.stock_uom) != cstr(stock_uom):
matched = False
break
if matched and bin_list:
frappe.db.sql("""update tabBin set stock_uom=%s where item_code=%s""", (stock_uom, item))
if not matched:
frappe.throw(
_("Default Unit of Measure for Item {0} cannot be changed directly because you have already made some transaction(s) with another UOM. You will need to create a new Item to use a different Default UOM.").format(item))
def get_item_defaults(item_code, company):
item = frappe.get_cached_doc('Item', item_code)
out = item.as_dict()
for d in item.item_defaults:
if d.company == company:
row = copy.deepcopy(d.as_dict())
row.pop("name")
out.update(row)
return out
def set_item_default(item_code, company, fieldname, value):
item = frappe.get_cached_doc('Item', item_code)
for d in item.item_defaults:
if d.company == company:
if not d.get(fieldname):
frappe.db.set_value(d.doctype, d.name, fieldname, value)
return
# no row found, add a new row for the company
d = item.append('item_defaults', {fieldname: value, "company": company})
d.db_insert()
item.clear_cache()
@frappe.whitelist()
def get_uom_conv_factor(uom, stock_uom):
uoms = [uom, stock_uom]
value = ""
uom_details = frappe.db.sql("""select to_uom, from_uom, value from `tabUOM Conversion Factor`\
where to_uom in ({0})
""".format(', '.join(['"' + frappe.db.escape(i, percent=False) + '"' for i in uoms])), as_dict=True)
for d in uom_details:
if d.from_uom == stock_uom and d.to_uom == uom:
value = 1/flt(d.value)
elif d.from_uom == uom and d.to_uom == stock_uom:
value = d.value
if not value:
uom_stock = frappe.db.get_value("UOM Conversion Factor", {"to_uom": stock_uom}, ["from_uom", "value"], as_dict=1)
uom_row = frappe.db.get_value("UOM Conversion Factor", {"to_uom": uom}, ["from_uom", "value"], as_dict=1)
if uom_stock and uom_row:
if uom_stock.from_uom == uom_row.from_uom:
value = flt(uom_stock.value) * 1/flt(uom_row.value)
return value
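# Resolution sketch: first look for a direct UOM Conversion Factor row between
# uom and stock_uom (inverting the stored value when the row points from
# stock_uom to uom); failing that, bridge the two UOMs through a shared base
# from_uom, e.g. both defined against the same reference unit (illustrative).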
@frappe.whitelist()
def get_item_attribute(parent, attribute_value=''):
if not frappe.has_permission("Item"):
frappe.msgprint(_("No Permission"), raise_exception=1)
return frappe.get_all("Item Attribute Value", fields = ["attribute_value"],
filters = {'parent': parent, 'attribute_value': ("like", "%%%s%%" % attribute_value)})
def update_variants(variants, template, publish_progress=True):
count=0
for d in variants:
variant = frappe.get_doc("Item", d)
copy_attributes_to_variant(template, variant)
variant.save()
count+=1
if publish_progress:
frappe.publish_progress(count*100/len(variants), title = _("Updating Variants..."))
| shubhamgupta123/erpnext | erpnext/stock/doctype/item/item.py | Python | gpl-3.0 | 36,632 |
import unittest
import os
from ui import main
print os.getcwd()
class TestMain(unittest.TestCase):
def setUp(self):
self.m = main.MainWindow()
def test_mainWindow(self):
assert(self.m)
def test_dataframe(self):
import numpy
#Random 25x4 Numpy Matrix
self.m.render_dataframe(numpy.random.rand(25,4) ,name='devel',rownames=xrange(0,25))
assert(self.m.active_robject)
assert(self.m.active_robject.columns)
assert(self.m.active_robject.column_data)
def test_imports(self):
datasets = ['iris','Nile','morley','freeny','sleep','mtcars']
for a in datasets:
main.rsession.r('%s=%s' % (a,a))
self.m.sync_with_r()
assert(a in self.m.robjects)
if __name__ == '__main__':
    unittest.main()
from datetime import datetime
import factory
from zds.forum.factories import PostFactory, TopicFactory
from zds.gallery.factories import GalleryFactory, UserGalleryFactory
from zds.utils.factories import LicenceFactory, SubCategoryFactory
from zds.utils.models import Licence
from zds.tutorialv2.models.database import PublishableContent, Validation, ContentReaction
from zds.tutorialv2.models.versioned import Container, Extract
from zds.tutorialv2.publication_utils import publish_content
from zds.tutorialv2.utils import init_new_repo
text_content = "Ceci est un texte bidon, **avec markown**"
tricky_text_content = (
"Ceci est un texte contenant plein d'images, pour la publication. Le modifier affectera le test !\n\n"
"# Les images\n\n"
"Image: ![PNG qui existe](https://upload.wikimedia.org/wikipedia/commons/2/24/"
"Derivative_of_medical_imaging.jpg)\n\n"
"Image: ![PNG qui existe pas](example.com/test.png)\n\n"
"Image: ![SVG qui existe](https://upload.wikimedia.org/wikipedia/commons/f/f9/10DF.svg)\n\n"
"Image: ![SVG qui existe pas](example.com/test.svg)\n\n"
"Image: ![GIF qui existe](https://upload.wikimedia.org/wikipedia/commons/2/27/AnimatedStar.gif)\n\n"
"Image: ![GIF qui existe pas](example.com/test.gif)\n\n"
"Image: ![Image locale qui existe pas](does-not-exist/test.png)\n\n"
"Image: ![Bonus: image bizarre](https://s2.qwant.com/thumbr/300x0/e/7/"
"56e2a2bdcd656d0b8a29c650116e29e893239089f71adf128d5f06330703b1/1024px-"
"Oh_my_darling.jpg?u=https%3A%2F%2Fupload"
".wikimedia.org%2Fwikipedia%2Fcommons%2Fthumb%2Fa%2Fa9%2FOh_my_darling.jpg%2F1024px-"
"Oh_my_darling.jpg&q=0&b=0&p=0&a=0)\n\n"
"Image: ![Bonus: le serveur existe pas !](http://unknown.image.zds/test.png)\n\n"
"Image: ![Bonus: juste du texte](URL invalide)\n\n"
"# Et donc ...\n\n"
"Voilà :)"
)
class PublishableContentFactory(factory.django.DjangoModelFactory):
"""
Factory that creates a PublishableContent.
"""
class Meta:
model = PublishableContent
title = factory.Sequence("Mon contenu No{}".format)
description = factory.Sequence("Description du contenu No{}".format)
type = "TUTORIAL"
creation_date = datetime.now()
pubdate = datetime.now()
@classmethod
def _generate(cls, create, attrs):
# These parameters are only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (they are removed from attrs).
light = attrs.pop("light", True)
author_list = attrs.pop("author_list", None)
add_license = attrs.pop("add_license", True)
add_category = attrs.pop("add_category", True)
# This parameter will be saved in the database,
# which is why we use attrs.get() (it stays in attrs).
licence = attrs.get("licence", None)
auths = author_list or []
if add_license:
given_licence = licence or Licence.objects.first()
if isinstance(given_licence, str) and given_licence:
given_licence = Licence.objects.filter(title=given_licence).first() or Licence.objects.first()
licence = given_licence or LicenceFactory()
text = text_content
if not light:
text = tricky_text_content
publishable_content = super()._generate(create, attrs)
publishable_content.gallery = GalleryFactory()
publishable_content.licence = licence
for auth in auths:
publishable_content.authors.add(auth)
if add_category:
publishable_content.subcategory.add(SubCategoryFactory())
publishable_content.save()
for author in publishable_content.authors.all():
UserGalleryFactory(user=author, gallery=publishable_content.gallery, mode="W")
init_new_repo(publishable_content, text, text)
return publishable_content
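# Illustrative use in a test (the author fixture is assumed):
#     content = PublishableContentFactory(author_list=[profile.user], light=False)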
class ContainerFactory(factory.Factory):
"""
Factory that creates a Container.
"""
class Meta:
model = Container
title = factory.Sequence(lambda n: "Mon container No{}".format(n + 1))
@classmethod
def _generate(cls, create, attrs):
# These parameters are only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (they are removed from attrs).
db_object = attrs.pop("db_object", None)
light = attrs.pop("light", True)
# This parameter will be saved in the database,
# which is why we use attrs.get() (it stays in attrs).
parent = attrs.get("parent", None)
# Needed because we use container.title later
container = super()._generate(create, attrs)
text = text_content
if not light:
text = tricky_text_content
sha = parent.repo_add_container(container.title, text, text)
container = parent.children[-1]
if db_object:
db_object.sha_draft = sha
db_object.save()
return container
class ExtractFactory(factory.Factory):
"""
Factory that creates a Extract.
"""
class Meta:
model = Extract
title = factory.Sequence(lambda n: "Mon extrait No{}".format(n + 1))
@classmethod
def _generate(cls, create, attrs):
# These parameters are only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (they are removed from attrs).
light = attrs.pop("light", True)
db_object = attrs.pop("db_object", None)
# This parameter will be saved in the database,
# which is why we use attrs.get() (it stays in attrs).
container = attrs.get("container", None)
# Needed because we use extract.title later
extract = super()._generate(create, attrs)
parent = container
text = text_content
if not light:
text = tricky_text_content
sha = parent.repo_add_extract(extract.title, text)
extract = parent.children[-1]
if db_object:
db_object.sha_draft = sha
db_object.save()
return extract
class ContentReactionFactory(factory.django.DjangoModelFactory):
"""
Factory that creates a ContentReaction.
"""
class Meta:
model = ContentReaction
ip_address = "192.168.3.1"
text = "Bonjour, je me présente, je m'appelle l'homme au texte bidonné"
@classmethod
def _generate(cls, create, attrs):
note = super()._generate(create, attrs)
note.pubdate = datetime.now()
note.save()
note.related_content.last_note = note
note.related_content.save()
return note
class BetaContentFactory(PublishableContentFactory):
"""
Factory that creates a PublishableContent with a beta version and a beta topic.
"""
@classmethod
def _generate(cls, create, attrs):
# This parameter is only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (it is removed from attrs).
beta_forum = attrs.pop("forum", None)
# Creates the PublishableContent (see PublishableContentFactory._generate() for more info)
publishable_content = super()._generate(create, attrs)
if publishable_content.authors.count() > 0 and beta_forum is not None:
beta_topic = TopicFactory(
title="[beta]" + publishable_content.title, author=publishable_content.authors.first(), forum=beta_forum
)
publishable_content.sha_beta = publishable_content.sha_draft
publishable_content.beta_topic = beta_topic
publishable_content.save()
PostFactory(topic=beta_topic, position=1, author=publishable_content.authors.first())
beta_topic.save()
return publishable_content
class PublishedContentFactory(PublishableContentFactory):
"""
Factory that creates a PublishableContent and the publish it.
"""
@classmethod
def _generate(cls, create, attrs):
# This parameter is only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (it is removed from attrs).
is_major_update = attrs.pop("is_major_update", True)
# Creates the PublishableContent (see PublishableContentFactory._generate() for more info)
content = super()._generate(create, attrs)
published = publish_content(content, content.load_version(), is_major_update)
content.sha_public = content.sha_draft
content.public_version = published
content.save()
return content
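# Illustrative: PublishedContentFactory(author_list=[profile.user]) returns
# content whose draft is already published (sha_public set), which is handy
# for reader-facing tests.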
class ValidationFactory(factory.django.DjangoModelFactory):
"""
Factory that creates a Validation.
"""
class Meta:
model = Validation
| ChantyTaguan/zds-site | zds/tutorialv2/factories.py | Python | gpl-3.0 | 8,840 |
#
# -*- coding: utf-8 -*-
# Dia Group Resize Plugin
# Copyright (c) 2015, Alexandre Machado <axmachado@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys, dia
import os
import pygtk
pygtk.require("2.0")
import gtk
import locale
class ResizeWindow(object):
def __init__(self, group, data):
self.group = group
self.data = data
self.initWindow()
def initWindow(self):
self.dlg = gtk.Dialog()
self.dlg.set_title('Group Resize')
self.dlg.set_border_width(6)
self.dlg.vbox.pack_start(self.dialogContents(), fill=True, expand=True, padding=5)
self.dlg.add_button(gtk.STOCK_APPLY, gtk.RESPONSE_APPLY)
self.dlg.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE)
self.dlg.set_has_separator(True)
self.dlg.set_modal(False)
self.dlg.get_widget_for_response(gtk.RESPONSE_CLOSE).connect("clicked", self.hide, None)
self.dlg.get_widget_for_response(gtk.RESPONSE_APPLY).connect("clicked", self.clickAplicar, None)
def dimensionsFrame(self, label):
frame = gtk.Frame(label)
table = gtk.Table(rows=4, columns=2)
ignore = gtk.RadioButton(group=None, label="do not change")
ignore.show()
smallest = gtk.RadioButton(group=ignore, label="shrink to smallest")
smallest.show()
largest = gtk.RadioButton(group=ignore, label="enlarge to largest")
largest.show()
specify = gtk.RadioButton(group=ignore, label="resize to:")
specify.show()
value = gtk.Entry()
value.show()
specify.connect("toggled", self.enableValueEntry, value)
self.enableValueEntry(specify, value)
table.attach (ignore, 0, 1, 0, 1)
table.attach (smallest, 0, 1, 1, 2)
table.attach (largest, 0, 1, 2, 3)
table.attach (specify, 0, 1, 3, 4)
table.attach (value, 1, 2, 3, 4)
frame.add(table)
table.show()
frame.show()
options = {
'ignore': ignore,
'smallest': smallest,
'largest': largest,
'specify': specify,
'value': value
}
return frame, options
def enableValueEntry(self, radioSpecify, entrySpecify, *args):
entrySpecify.set_sensitive(radioSpecify.get_active())
def contentsFrameWidth(self):
frame, self.widthOptions = self.dimensionsFrame('Width')
return frame
def contentsFrameHeight(self):
frame, self.heightOptions = self.dimensionsFrame('Height')
return frame
def dialogContents(self):
contents = gtk.VBox(spacing=5)
contents.pack_start(self.contentsFrameWidth(), fill=True, expand=True)
contents.pack_start(self.contentsFrameHeight(), fill=True, expand=True)
contents.show()
return contents
def getSelectedGroupOption(self, options):
value = options['value'].get_text()
for opt in 'ignore', 'smallest', 'largest', 'specify':
if options[opt].get_active():
return (opt,value)
return ('ignore',value)
def getValue(self, opt, value, elProperty):
if opt == 'specify':
return self.toFloat(value)
else:
values = [ x.properties[elProperty].value for x in self.group if x.properties.has_key(elProperty) ]
if opt == 'smallest':
return min(values)
else:
return max(values)
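    # Width and height are adjusted symmetrically below: each element grows or
    # shrinks by half the difference on each side, then is moved back to its
    # original position so its anchor point stays fixed.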
def adjustWidth(self, value):
for obj in self.group:
pos = obj.properties['obj_pos'].value
if obj.properties.has_key("elem_width"):
difference = value - obj.properties['elem_width'].value
handleLeft = obj.handles[3]
handleRight = obj.handles[4]
amount = difference/2
obj.move_handle(handleLeft, (handleLeft.pos.x - amount, handleLeft.pos.y), 0, 0)
obj.move_handle(handleRight, (handleRight.pos.x + amount, handleRight.pos.y), 0, 0)
obj.move(pos.x, pos.y)
def adjustHeight(self, value):
for obj in self.group:
pos = obj.properties['obj_pos'].value
if obj.properties.has_key("elem_height"):
difference = value - obj.properties['elem_height'].value
handleTop = obj.handles[1]
handleBottom = obj.handles[6]
amount = difference/2
obj.move_handle(handleTop, (handleTop.pos.x, handleTop.pos.y - amount), 0, 0)
obj.move_handle(handleBottom, (handleBottom.pos.x, handleBottom.pos.y + amount), 0, 0)
obj.move(pos.x, pos.y)
def toFloat(self, valor):
return locale.atof(valor)
def clickAplicar(self, *args):
optWidth = self.getSelectedGroupOption(self.widthOptions)
optHeight = self.getSelectedGroupOption(self.heightOptions)
try:
if optWidth[0] != 'ignore':
width = self.getValue(optWidth[0], optWidth[1], 'elem_width')
self.adjustWidth(width)
if optHeight[0] != 'ignore':
height = self.getValue(optHeight[0], optHeight[1], 'elem_height')
self.adjustHeight(height)
if dia.active_display():
diagram = dia.active_display().diagram
for obj in self.group:
diagram.update_connections(obj)
except Exception,e:
dia.message(gtk.MESSAGE_ERROR, repr(e))
if dia.active_display():
dia.active_display().add_update_all()
dia.active_display().flush()
def show(self):
self.dlg.show()
def hide(self, *args):
self.dlg.hide()
def run(self):
return self.dlg.run()
def dia_group_resize_db (data,flags):
diagram = dia.active_display().diagram
group = diagram.get_sorted_selected()
if len(group) > 0:
win = ResizeWindow(group, data)
win.show()
else:
dia.message(gtk.MESSAGE_INFO, "Please select a group of objects")
dia.register_action("ObjectGroupResize", "Group Resize",
"/DisplayMenu/Objects/ObjectsExtensionStart",
dia_group_resize_db)
| axmachado/dia-group-resize | group_resize.py | Python | gpl-3.0 | 6,960 |
from ..models import Album
from ..resource import SingleResource, ListResource
from ..schemas import AlbumSchema
class SingleAlbum(SingleResource):
schema = AlbumSchema()
routes = ('/album/<int:id>/',)
model = Album
class ListAlbums(ListResource):
schema = AlbumSchema(many=True)
routes = ('/album/', '/tracklist/')
model = Album
| justanr/owa | owa/api/album.py | Python | gpl-3.0 | 358 |
"""
System plugin
Copyright (C) 2016 Walid Benghabrit
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from accmon.plugins.plugin import *
class System(Plugin):
def __init__(self):
super().__init__()
def handle_request(self, request):
res = super(System, self).handle_request(request)
if res is not None: return res
| hkff/AccMon | accmon/plugins/system.py | Python | gpl-3.0 | 920 |
#!/usr/bin/env python
import turtle
import random
def bloom(radius):
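    # Stamp rings of filled circles: for each petal radius rad (40 down to 15
    # in steps of 5), hop around a guide circle of radius radius+rad in arcs
    # of rad degrees and draw 360//rad petals with slightly randomized
    # near-white fills.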
turtle.colormode(255)
for rad in range(40, 10, -5):
for looper in range(360//rad):
turtle.up()
turtle.circle(radius+rad, rad)
turtle.begin_fill()
turtle.fillcolor((200+random.randint(0, rad),
200+random.randint(0, rad),
200+random.randint(0, rad)))
turtle.down()
turtle.circle(-rad)
turtle.end_fill()
def main():
"""Simple flower, using global turtle instance"""
turtle.speed(0)
turtle.colormode(1.0)
bloom(5)
turtle.exitonclick()
###
if __name__ == "__main__":
main()
| mpclemens/python-explore | turtle/bloom.py | Python | gpl-3.0 | 728 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('characters', '0011_auto_20160212_1144'),
]
operations = [
migrations.CreateModel(
name='CharacterSpells',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('character', models.ForeignKey(verbose_name='Karakt\xe4r', to='characters.Character')),
],
options={
'verbose_name': 'Karakt\xe4rers magi',
'verbose_name_plural': 'Karakt\xe4rers magi',
},
),
migrations.AlterModelOptions(
name='spellextras',
options={'verbose_name': 'Magi extra', 'verbose_name_plural': 'Magi extra'},
),
migrations.AlterModelOptions(
name='spellinfo',
options={'verbose_name': 'Magi information', 'verbose_name_plural': 'Magi information'},
),
migrations.AddField(
model_name='spellinfo',
name='name',
field=models.CharField(default='Magins namn', max_length=256, verbose_name='Namn'),
),
migrations.AlterField(
model_name='spellinfo',
name='parent',
field=models.ForeignKey(verbose_name='Tillh\xf6righet', to='characters.SpellParent'),
),
migrations.AddField(
model_name='characterspells',
name='spells',
field=models.ManyToManyField(to='characters.SpellInfo', verbose_name='Magier och besv\xe4rjelser'),
),
]
| svamp/rp_management | characters/migrations/0012_auto_20160212_1210.py | Python | gpl-3.0 | 1,712 |
#!/usr/bin/env python
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Miscellaneous network utility code."""
from __future__ import absolute_import, division, print_function, with_statement
import errno
import os
import re
import socket
import ssl
import stat
from lib.tornado.concurrent import dummy_executor, run_on_executor
from lib.tornado.ioloop import IOLoop
from lib.tornado.platform.auto import set_close_exec
from lib.tornado.util import Configurable
def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None):
"""Creates listening sockets bound to the given port and address.
Returns a list of socket objects (multiple sockets are returned if
the given address maps to multiple IP addresses, which is most common
for mixed IPv4 and IPv6 use).
Address may be either an IP address or hostname. If it's a hostname,
the server will listen on all IP addresses associated with the
name. Address may be an empty string or None to listen on all
available interfaces. Family may be set to either `socket.AF_INET`
or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise
both will be used if available.
The ``backlog`` argument has the same meaning as for
`socket.listen() <socket.socket.listen>`.
``flags`` is a bitmask of AI_* flags to `~socket.getaddrinfo`, like
``socket.AI_PASSIVE | socket.AI_NUMERICHOST``.
"""
sockets = []
if address == "":
address = None
if not socket.has_ipv6 and family == socket.AF_UNSPEC:
# Python can be compiled with --disable-ipv6, which causes
# operations on AF_INET6 sockets to fail, but does not
# automatically exclude those results from getaddrinfo
# results.
# http://bugs.python.org/issue16208
family = socket.AF_INET
if flags is None:
flags = socket.AI_PASSIVE
for res in set(socket.getaddrinfo(address, port, family, socket.SOCK_STREAM,
0, flags)):
af, socktype, proto, canonname, sockaddr = res
sock = socket.socket(af, socktype, proto)
set_close_exec(sock.fileno())
if os.name != 'nt':
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if af == socket.AF_INET6:
# On linux, ipv6 sockets accept ipv4 too by default,
# but this makes it impossible to bind to both
# 0.0.0.0 in ipv4 and :: in ipv6. On other systems,
# separate sockets *must* be used to listen for both ipv4
# and ipv6. For consistency, always disable ipv4 on our
# ipv6 sockets and use a separate ipv4 socket when needed.
#
# Python 2.x on windows doesn't have IPPROTO_IPV6.
if hasattr(socket, "IPPROTO_IPV6"):
sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
sock.setblocking(0)
sock.bind(sockaddr)
sock.listen(backlog)
sockets.append(sock)
return sockets
if hasattr(socket, 'AF_UNIX'):
def bind_unix_socket(file, mode=0o600, backlog=128):
"""Creates a listening unix socket.
If a socket with the given name already exists, it will be deleted.
If any other file with that name exists, an exception will be
raised.
Returns a socket object (not a list of socket objects like
`bind_sockets`)
"""
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
set_close_exec(sock.fileno())
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setblocking(0)
try:
st = os.stat(file)
except OSError as err:
if err.errno != errno.ENOENT:
raise
else:
if stat.S_ISSOCK(st.st_mode):
os.remove(file)
else:
                raise ValueError("File %s exists and is not a socket" % file)
sock.bind(file)
os.chmod(file, mode)
sock.listen(backlog)
return sock
def add_accept_handler(sock, callback, io_loop=None):
"""Adds an `.IOLoop` event handler to accept new connections on ``sock``.
When a connection is accepted, ``callback(connection, address)`` will
be run (``connection`` is a socket object, and ``address`` is the
address of the other end of the connection). Note that this signature
is different from the ``callback(fd, events)`` signature used for
`.IOLoop` handlers.
"""
if io_loop is None:
io_loop = IOLoop.current()
def accept_handler(fd, events):
while True:
try:
connection, address = sock.accept()
except socket.error as e:
if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
return
raise
callback(connection, address)
io_loop.add_handler(sock.fileno(), accept_handler, IOLoop.READ)
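# Typical wiring (illustrative; ``handle_connection(conn, address)`` is an
# assumed callback supplied by the caller):
#
#     sockets = bind_sockets(8888)
#     for sock in sockets:
#         add_accept_handler(sock, handle_connection)
#     IOLoop.current().start()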
def is_valid_ip(ip):
"""Returns true if the given string is a well-formed IP address.
Supports IPv4 and IPv6.
"""
try:
res = socket.getaddrinfo(ip, 0, socket.AF_UNSPEC,
socket.SOCK_STREAM,
0, socket.AI_NUMERICHOST)
return bool(res)
except socket.gaierror as e:
if e.args[0] == socket.EAI_NONAME:
return False
raise
return True
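# Quick sanity checks (illustrative):
#     is_valid_ip("127.0.0.1")    # True
#     is_valid_ip("::1")          # True
#     is_valid_ip("example.com")  # False -- hostnames are not IP literals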
class Resolver(Configurable):
"""Configurable asynchronous DNS resolver interface.
By default, a blocking implementation is used (which simply calls
`socket.getaddrinfo`). An alternative implementation can be
chosen with the `Resolver.configure <.Configurable.configure>`
class method::
Resolver.configure('tornado.netutil.ThreadedResolver')
The implementations of this interface included with Tornado are
* `tornado.netutil.BlockingResolver`
* `tornado.netutil.ThreadedResolver`
* `tornado.netutil.OverrideResolver`
* `tornado.platform.twisted.TwistedResolver`
* `tornado.platform.caresresolver.CaresResolver`
"""
@classmethod
def configurable_base(cls):
return Resolver
@classmethod
def configurable_default(cls):
return BlockingResolver
def resolve(self, host, port, family=socket.AF_UNSPEC, callback=None):
"""Resolves an address.
The ``host`` argument is a string which may be a hostname or a
literal IP address.
Returns a `.Future` whose result is a list of (family,
address) pairs, where address is a tuple suitable to pass to
`socket.connect <socket.socket.connect>` (i.e. a ``(host,
port)`` pair for IPv4; additional fields may be present for
IPv6). If a ``callback`` is passed, it will be run with the
result as an argument when it is complete.
"""
raise NotImplementedError()
class ExecutorResolver(Resolver):
def initialize(self, io_loop=None, executor=None):
self.io_loop = io_loop or IOLoop.current()
self.executor = executor or dummy_executor
@run_on_executor
def resolve(self, host, port, family=socket.AF_UNSPEC):
addrinfo = socket.getaddrinfo(host, port, family)
results = []
for family, socktype, proto, canonname, address in addrinfo:
results.append((family, address))
return results
class BlockingResolver(ExecutorResolver):
"""Default `Resolver` implementation, using `socket.getaddrinfo`.
The `.IOLoop` will be blocked during the resolution, although the
callback will not be run until the next `.IOLoop` iteration.
"""
def initialize(self, io_loop=None):
super(BlockingResolver, self).initialize(io_loop=io_loop)
class ThreadedResolver(ExecutorResolver):
"""Multithreaded non-blocking `Resolver` implementation.
Requires the `concurrent.futures` package to be installed
(available in the standard library since Python 3.2,
installable with ``pip install futures`` in older versions).
The thread pool size can be configured with::
Resolver.configure('tornado.netutil.ThreadedResolver',
num_threads=10)
"""
def initialize(self, io_loop=None, num_threads=10):
from concurrent.futures import ThreadPoolExecutor
super(ThreadedResolver, self).initialize(
io_loop=io_loop, executor=ThreadPoolExecutor(num_threads))
class OverrideResolver(Resolver):
"""Wraps a resolver with a mapping of overrides.
This can be used to make local DNS changes (e.g. for testing)
without modifying system-wide settings.
The mapping can contain either host strings or host-port pairs.
"""
def initialize(self, resolver, mapping):
self.resolver = resolver
self.mapping = mapping
def resolve(self, host, port, *args, **kwargs):
if (host, port) in self.mapping:
host, port = self.mapping[(host, port)]
elif host in self.mapping:
host = self.mapping[host]
return self.resolver.resolve(host, port, *args, **kwargs)
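# Illustrative use in tests (host names are assumed):
#     resolver = OverrideResolver(resolver=BlockingResolver(),
#                                 mapping={"example.com": "127.0.0.1",
#                                          ("example.com", 443): ("127.0.0.1", 8443)})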
# These are the keyword arguments to ssl.wrap_socket that must be translated
# to their SSLContext equivalents (the other arguments are still passed
# to SSLContext.wrap_socket).
_SSL_CONTEXT_KEYWORDS = frozenset(['ssl_version', 'certfile', 'keyfile',
'cert_reqs', 'ca_certs', 'ciphers'])
def ssl_options_to_context(ssl_options):
"""Try to convert an ``ssl_options`` dictionary to an
`~ssl.SSLContext` object.
The ``ssl_options`` dictionary contains keywords to be passed to
`ssl.wrap_socket`. In Python 3.2+, `ssl.SSLContext` objects can
be used instead. This function converts the dict form to its
`~ssl.SSLContext` equivalent, and may be used when a component which
accepts both forms needs to upgrade to the `~ssl.SSLContext` version
to use features like SNI or NPN.
"""
if isinstance(ssl_options, dict):
assert all(k in _SSL_CONTEXT_KEYWORDS for k in ssl_options), ssl_options
if (not hasattr(ssl, 'SSLContext') or
isinstance(ssl_options, ssl.SSLContext)):
return ssl_options
context = ssl.SSLContext(
ssl_options.get('ssl_version', ssl.PROTOCOL_SSLv23))
if 'certfile' in ssl_options:
context.load_cert_chain(ssl_options['certfile'], ssl_options.get('keyfile', None))
if 'cert_reqs' in ssl_options:
context.verify_mode = ssl_options['cert_reqs']
if 'ca_certs' in ssl_options:
context.load_verify_locations(ssl_options['ca_certs'])
if 'ciphers' in ssl_options:
context.set_ciphers(ssl_options['ciphers'])
return context
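# Illustrative: the dict form below becomes an equivalent SSLContext
# (certificate paths are assumed):
#     ssl_options_to_context({"certfile": "server.crt", "keyfile": "server.key"})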
def ssl_wrap_socket(socket, ssl_options, server_hostname=None, **kwargs):
"""Returns an ``ssl.SSLSocket`` wrapping the given socket.
``ssl_options`` may be either a dictionary (as accepted by
`ssl_options_to_context`) or an `ssl.SSLContext` object.
Additional keyword arguments are passed to ``wrap_socket``
(either the `~ssl.SSLContext` method or the `ssl` module function
as appropriate).
"""
context = ssl_options_to_context(ssl_options)
if hasattr(ssl, 'SSLContext') and isinstance(context, ssl.SSLContext):
if server_hostname is not None and getattr(ssl, 'HAS_SNI'):
# Python doesn't have server-side SNI support so we can't
# really unittest this, but it can be manually tested with
# python3.2 -m tornado.httpclient https://sni.velox.ch
return context.wrap_socket(socket, server_hostname=server_hostname,
**kwargs)
else:
return context.wrap_socket(socket, **kwargs)
else:
return ssl.wrap_socket(socket, **dict(context, **kwargs))
if hasattr(ssl, 'match_hostname'): # python 3.2+
ssl_match_hostname = ssl.match_hostname
SSLCertificateError = ssl.CertificateError
else:
# match_hostname was added to the standard library ssl module in python 3.2.
# The following code was backported for older releases and copied from
# https://bitbucket.org/brandon/backports.ssl_match_hostname
class SSLCertificateError(ValueError):
pass
def _dnsname_to_pat(dn):
pats = []
for frag in dn.split(r'.'):
if frag == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
else:
# Otherwise, '*' matches any dotless fragment.
frag = re.escape(frag)
pats.append(frag.replace(r'\*', '[^.]*'))
return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
def ssl_match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules
are mostly followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_to_pat(value).match(hostname):
return
dnsnames.append(value)
if not san:
# The subject is only checked when subjectAltName is empty
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_to_pat(value).match(hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise SSLCertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise SSLCertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise SSLCertificateError("no appropriate commonName or "
"subjectAltName fields were found")
| mountainpenguin/BySH | server/lib/tornado/netutil.py | Python | gpl-3.0 | 15,081 |
import re
import traceback
from urllib.parse import quote
from requests.utils import dict_from_cookiejar
from sickchill import logger
from sickchill.helper.common import convert_size, try_int
from sickchill.oldbeard import tvcache
from sickchill.oldbeard.bs4_parser import BS4Parser
from sickchill.providers.torrent.TorrentProvider import TorrentProvider
class Provider(TorrentProvider):
def __init__(self):
super().__init__("Pretome")
self.username = None
self.password = None
self.pin = None
self.minseed = 0
self.minleech = 0
self.urls = {
"base_url": "https://pretome.info",
"login": "https://pretome.info/takelogin.php",
"detail": "https://pretome.info/details.php?id=%s",
"search": "https://pretome.info/browse.php?search=%s%s",
"download": "https://pretome.info/download.php/%s/%s.torrent",
}
self.url = self.urls["base_url"]
self.categories = "&st=1&cat%5B%5D=7"
self.proper_strings = ["PROPER", "REPACK"]
self.cache = tvcache.TVCache(self)
def _check_auth(self):
if not self.username or not self.password or not self.pin:
logger.warning("Invalid username or password or pin. Check your settings")
return True
def login(self):
if any(dict_from_cookiejar(self.session.cookies).values()):
return True
login_params = {"username": self.username, "password": self.password, "login_pin": self.pin}
response = self.get_url(self.urls["login"], post_data=login_params, returns="text")
if not response:
logger.warning("Unable to connect to provider")
return False
if re.search("Username or password incorrect", response):
logger.warning("Invalid username or password. Check your settings")
return False
return True
def search(self, search_params, age=0, ep_obj=None):
results = []
if not self.login():
return results
for mode in search_params:
items = []
logger.debug(_("Search Mode: {mode}".format(mode=mode)))
for search_string in search_params[mode]:
if mode != "RSS":
logger.debug(_("Search String: {search_string}".format(search_string=search_string)))
search_url = self.urls["search"] % (quote(search_string), self.categories)
data = self.get_url(search_url, returns="text")
if not data:
continue
try:
with BS4Parser(data, "html5lib") as html:
                        # Continue only if at least one release was found
empty = html.find("h2", text="No .torrents fit this filter criteria")
if empty:
logger.debug("Data returned from provider does not contain any torrents")
continue
torrent_table = html.find("table", style="border: none; width: 100%;")
if not torrent_table:
logger.exception("Could not find table of torrents")
continue
torrent_rows = torrent_table("tr", class_="browse")
for result in torrent_rows:
cells = result("td")
size = None
link = cells[1].find("a", style="font-size: 1.25em; font-weight: bold;")
torrent_id = link["href"].replace("details.php?id=", "")
try:
if link.get("title", ""):
title = link["title"]
else:
title = link.contents[0]
download_url = self.urls["download"] % (torrent_id, link.contents[0])
seeders = int(cells[9].contents[0])
leechers = int(cells[10].contents[0])
# Need size for failed downloads handling
if size is None:
torrent_size = cells[7].text
size = convert_size(torrent_size) or -1
except (AttributeError, TypeError):
continue
if not all([title, download_url]):
continue
# Filter unseeded torrent
if seeders < self.minseed or leechers < self.minleech:
if mode != "RSS":
logger.debug(
"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
title, seeders, leechers
)
)
continue
item = {"title": title, "link": download_url, "size": size, "seeders": seeders, "leechers": leechers, "hash": ""}
if mode != "RSS":
logger.debug("Found result: {0} with {1} seeders and {2} leechers".format(title, seeders, leechers))
items.append(item)
except Exception:
logger.exception("Failed parsing provider. Traceback: {0}".format(traceback.format_exc()))
# For each search mode sort all the items by seeders if available
items.sort(key=lambda d: try_int(d.get("seeders", 0)), reverse=True)
results += items
return results
| h3llrais3r/SickRage | sickchill/oldbeard/providers/pretome.py | Python | gpl-3.0 | 5,958 |
# -*- encoding: UTF-8 -*-
import re
import sys
import os
import traceback
from ..ibdawg import IBDAWG
from ..echo import echo
from . import gc_options
__all__ = [ "lang", "locales", "pkg", "name", "version", "author", \
"load", "parse", "getDictionary", \
"setOptions", "getOptions", "getOptionsLabels", "resetOptions", \
"ignoreRule", "resetIgnoreRules" ]
__version__ = u"${version}"
lang = u"${lang}"
locales = ${loc}
pkg = u"${implname}"
name = u"${name}"
version = u"${version}"
author = u"${author}"
# commons regexes
_zEndOfSentence = re.compile(u'([.?!:;…][ .?!… »”")]*|.$)')
_zBeginOfParagraph = re.compile(u"^\W*")
_zEndOfParagraph = re.compile(u"\W*$")
_zNextWord = re.compile(u" +(\w[\w-]*)")
_zPrevWord = re.compile(u"(\w[\w-]*) +$")
# grammar rules and dictionary
_rules = None
_dOptions = dict(gc_options.dOpt) # duplication necessary, to be able to reset to default
_aIgnoredRules = set()
_oDict = None
_dAnalyses = {} # cache for data from dictionary
_GLOBALS = globals()
#### Parsing
def parse (sText, sCountry="${country_default}", bDebug=False, dOptions=None):
"analyses the paragraph sText and returns list of errors"
aErrors = None
sAlt = sText
dDA = {}
dOpt = _dOptions if not dOptions else dOptions
# parse paragraph
try:
sNew, aErrors = _proofread(sText, sAlt, 0, True, dDA, sCountry, dOpt, bDebug)
if sNew:
sText = sNew
except:
raise
# parse sentences
for iStart, iEnd in _getSentenceBoundaries(sText):
if 4 < (iEnd - iStart) < 2000:
dDA.clear()
try:
_, errs = _proofread(sText[iStart:iEnd], sAlt[iStart:iEnd], iStart, False, dDA, sCountry, dOpt, bDebug)
aErrors.extend(errs)
except:
raise
return aErrors
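# Illustrative call sequence (the sample text is made up; outside of
# Writer each returned error is a dict as built by _createDictError):
#
#     load()
#     for dErr in parse(u"Some text to check."):
#         echo(dErr["sRuleId"] + ": " + dErr["sMessage"])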
def _getSentenceBoundaries (sText):
iStart = _zBeginOfParagraph.match(sText).end()
for m in _zEndOfSentence.finditer(sText):
yield (iStart, m.end())
iStart = m.end()
def _proofread (s, sx, nOffset, bParagraph, dDA, sCountry, dOptions, bDebug):
aErrs = []
bChange = False
if not bParagraph:
# after the first pass, we modify automatically some characters
if u" " in s:
s = s.replace(u" ", u' ') # nbsp
bChange = True
if u" " in s:
s = s.replace(u" ", u' ') # nnbsp
bChange = True
if u"@" in s:
s = s.replace(u"@", u' ')
bChange = True
if u"'" in s:
s = s.replace(u"'", u"’")
bChange = True
if u"‑" in s:
s = s.replace(u"‑", u"-") # nobreakdash
bChange = True
bIdRule = option('idrule')
for sOption, lRuleGroup in _getRules(bParagraph):
if not sOption or dOptions.get(sOption, False):
for zRegex, bUppercase, sRuleId, lActions in lRuleGroup:
if sRuleId not in _aIgnoredRules:
for m in zRegex.finditer(s):
for sFuncCond, cActionType, sWhat, *eAct in lActions:
# action in lActions: [ condition, action type, replacement/suggestion/action[, iGroup[, message, URL]] ]
try:
if not sFuncCond or _GLOBALS[sFuncCond](s, sx, m, dDA, sCountry):
if cActionType == "-":
# grammar error
                                        # (s, sRepl, nOffset, m, iGroup, sId, bUppercase, sMsg, sURL, bIdRule, sOption)
aErrs.append(_createError(s, sWhat, nOffset, m, eAct[0], sRuleId, bUppercase, eAct[1], eAct[2], bIdRule, sOption))
elif cActionType == "~":
# text processor
s = _rewrite(s, sWhat, eAct[0], m, bUppercase)
bChange = True
if bDebug:
echo(u"~ " + s + " -- " + m.group(eAct[0]) + " # " + sRuleId)
elif cActionType == "=":
# disambiguation
_GLOBALS[sWhat](s, m, dDA)
if bDebug:
echo(u"= " + m.group(0) + " # " + sRuleId + "\nDA: " + str(dDA))
else:
echo("# error: unknown action at " + sRuleId)
except Exception as e:
raise Exception(str(e), sRuleId)
if bChange:
return (s, aErrs)
return (False, aErrs)
def _createWriterError (s, sRepl, nOffset, m, iGroup, sId, bUppercase, sMsg, sURL, bIdRule, sOption):
"error for Writer (LO/OO)"
xErr = SingleProofreadingError()
#xErr = uno.createUnoStruct( "com.sun.star.linguistic2.SingleProofreadingError" )
xErr.nErrorStart = nOffset + m.start(iGroup)
xErr.nErrorLength = m.end(iGroup) - m.start(iGroup)
xErr.nErrorType = PROOFREADING
xErr.aRuleIdentifier = sId
# suggestions
if sRepl[0:1] == "=":
sugg = _GLOBALS[sRepl[1:]](s, m)
if sugg:
if bUppercase and m.group(iGroup)[0:1].isupper():
xErr.aSuggestions = tuple(map(str.capitalize, sugg.split("|")))
else:
xErr.aSuggestions = tuple(sugg.split("|"))
else:
xErr.aSuggestions = ()
elif sRepl == "_":
xErr.aSuggestions = ()
else:
if bUppercase and m.group(iGroup)[0:1].isupper():
xErr.aSuggestions = tuple(map(str.capitalize, m.expand(sRepl).split("|")))
else:
xErr.aSuggestions = tuple(m.expand(sRepl).split("|"))
# Message
if sMsg[0:1] == "=":
sMessage = _GLOBALS[sMsg[1:]](s, m)
else:
sMessage = m.expand(sMsg)
xErr.aShortComment = sMessage # sMessage.split("|")[0] # in context menu
xErr.aFullComment = sMessage # sMessage.split("|")[-1] # in dialog
if bIdRule:
xErr.aShortComment += " # " + sId
# URL
if sURL:
p = PropertyValue()
p.Name = "FullCommentURL"
p.Value = sURL
xErr.aProperties = (p,)
else:
xErr.aProperties = ()
return xErr
def _createDictError (s, sRepl, nOffset, m, iGroup, sId, bUppercase, sMsg, sURL, bIdRule, sOption):
"error as a dictionary"
dErr = {}
dErr["nStart"] = nOffset + m.start(iGroup)
dErr["nEnd"] = nOffset + m.end(iGroup)
dErr["sRuleId"] = sId
dErr["sType"] = sOption if sOption else "notype"
# suggestions
if sRepl[0:1] == "=":
sugg = _GLOBALS[sRepl[1:]](s, m)
if sugg:
if bUppercase and m.group(iGroup)[0:1].isupper():
dErr["aSuggestions"] = list(map(str.capitalize, sugg.split("|")))
else:
dErr["aSuggestions"] = sugg.split("|")
else:
dErr["aSuggestions"] = ()
elif sRepl == "_":
dErr["aSuggestions"] = ()
else:
if bUppercase and m.group(iGroup)[0:1].isupper():
dErr["aSuggestions"] = list(map(str.capitalize, m.expand(sRepl).split("|")))
else:
dErr["aSuggestions"] = m.expand(sRepl).split("|")
# Message
if sMsg[0:1] == "=":
sMessage = _GLOBALS[sMsg[1:]](s, m)
else:
sMessage = m.expand(sMsg)
dErr["sMessage"] = sMessage
if bIdRule:
dErr["sMessage"] += " # " + sId
# URL
dErr["URL"] = sURL if sURL else ""
return dErr
def _rewrite (s, sRepl, iGroup, m, bUppercase):
"text processor: write sRepl in s at iGroup position"
ln = m.end(iGroup) - m.start(iGroup)
if sRepl == "*":
sNew = " " * ln
elif sRepl == ">" or sRepl == "_" or sRepl == u"~":
sNew = sRepl + " " * (ln-1)
elif sRepl == "@":
sNew = "@" * ln
elif sRepl[0:1] == "=":
if sRepl[1:2] != "@":
sNew = _GLOBALS[sRepl[1:]](s, m)
sNew = sNew + " " * (ln-len(sNew))
else:
sNew = _GLOBALS[sRepl[2:]](s, m)
sNew = sNew + "@" * (ln-len(sNew))
if bUppercase and m.group(iGroup)[0:1].isupper():
sNew = sNew.capitalize()
else:
sNew = m.expand(sRepl)
sNew = sNew + " " * (ln-len(sNew))
return s[0:m.start(iGroup)] + sNew + s[m.end(iGroup):]
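# Illustrative behaviour (for a match m whose group iGroup spans "foo"):
# sRepl "*" yields "   " (same length, so later offsets stay valid),
# ">" yields ">  ", "@" yields "@@@", and "=func" calls func(s, m),
# padding the result with spaces ("=@func" pads with "@" instead).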
def ignoreRule (sId):
_aIgnoredRules.add(sId)
def resetIgnoreRules ():
_aIgnoredRules.clear()
#### init
try:
# LibreOffice / OpenOffice
from com.sun.star.linguistic2 import SingleProofreadingError
from com.sun.star.text.TextMarkupType import PROOFREADING
from com.sun.star.beans import PropertyValue
#import lightproof_handler_${implname} as opt
_createError = _createWriterError
except ImportError:
_createError = _createDictError
def load ():
global _oDict
try:
_oDict = IBDAWG("${binary_dic}")
except:
traceback.print_exc()
def setOptions (dOpt):
_dOptions.update(dOpt)
def getOptions ():
return _dOptions
def getOptionsLabels (sLang):
return gc_options.getUI(sLang)
def resetOptions ():
global _dOptions
_dOptions = dict(gc_options.dOpt)
def getDictionary ():
return _oDict
def _getRules (bParagraph):
try:
if not bParagraph:
return _rules.lSentenceRules
return _rules.lParagraphRules
except:
_loadRules()
if not bParagraph:
return _rules.lSentenceRules
return _rules.lParagraphRules
def _loadRules2 ():
from itertools import chain
from . import gc_rules
global _rules
_rules = gc_rules
# compile rules regex
for rule in chain(_rules.lParagraphRules, _rules.lSentenceRules):
try:
rule[1] = re.compile(rule[1])
except:
echo("Bad regular expression in # " + str(rule[3]))
rule[1] = "(?i)<Grammalecte>"
def _loadRules ():
from itertools import chain
from . import gc_rules
global _rules
_rules = gc_rules
# compile rules regex
for rulegroup in chain(_rules.lParagraphRules, _rules.lSentenceRules):
for rule in rulegroup[1]:
try:
rule[0] = re.compile(rule[0])
except:
echo("Bad regular expression in # " + str(rule[2]))
rule[0] = "(?i)<Grammalecte>"
def _getPath ():
return os.path.join(os.path.dirname(sys.modules[__name__].__file__), __name__ + ".py")
#### common functions
def option (sOpt):
"return True if option sOpt is active"
return _dOptions.get(sOpt, False)
def displayInfo (dDA, tWord):
"for debugging: retrieve info of word"
if not tWord:
echo("> nothing to find")
return True
if tWord[1] not in _dAnalyses and not _storeMorphFromFSA(tWord[1]):
echo("> not in FSA")
return True
if tWord[0] in dDA:
echo("DA: " + str(dDA[tWord[0]]))
echo("FSA: " + str(_dAnalyses[tWord[1]]))
return True
def _storeMorphFromFSA (sWord):
"retrieves morphologies list from _oDict -> _dAnalyses"
global _dAnalyses
_dAnalyses[sWord] = _oDict.getMorph(sWord)
return True if _dAnalyses[sWord] else False
def morph (dDA, tWord, sPattern, bStrict=True, bNoWord=False):
"analyse a tuple (position, word), return True if sPattern in morphologies (disambiguation on)"
if not tWord:
return bNoWord
if tWord[1] not in _dAnalyses and not _storeMorphFromFSA(tWord[1]):
return False
lMorph = dDA[tWord[0]] if tWord[0] in dDA else _dAnalyses[tWord[1]]
if not lMorph:
return False
p = re.compile(sPattern)
if bStrict:
return all(p.search(s) for s in lMorph)
return any(p.search(s) for s in lMorph)
def morphex (dDA, tWord, sPattern, sNegPattern, bNoWord=False):
"analyse a tuple (position, word), returns True if not sNegPattern in word morphologies and sPattern in word morphologies (disambiguation on)"
if not tWord:
return bNoWord
if tWord[1] not in _dAnalyses and not _storeMorphFromFSA(tWord[1]):
return False
lMorph = dDA[tWord[0]] if tWord[0] in dDA else _dAnalyses[tWord[1]]
# check negative condition
np = re.compile(sNegPattern)
if any(np.search(s) for s in lMorph):
return False
# search sPattern
p = re.compile(sPattern)
return any(p.search(s) for s in lMorph)
def analyse (sWord, sPattern, bStrict=True):
"analyse a word, return True if sPattern in morphologies (disambiguation off)"
if sWord not in _dAnalyses and not _storeMorphFromFSA(sWord):
return False
if not _dAnalyses[sWord]:
return False
p = re.compile(sPattern)
if bStrict:
return all(p.search(s) for s in _dAnalyses[sWord])
return any(p.search(s) for s in _dAnalyses[sWord])
def analysex (sWord, sPattern, sNegPattern):
"analyse a word, returns True if not sNegPattern in word morphologies and sPattern in word morphologies (disambiguation off)"
if sWord not in _dAnalyses and not _storeMorphFromFSA(sWord):
return False
# check negative condition
np = re.compile(sNegPattern)
if any(np.search(s) for s in _dAnalyses[sWord]):
return False
# search sPattern
p = re.compile(sPattern)
return any(p.search(s) for s in _dAnalyses[sWord])
def stem (sWord):
"returns a list of sWord's stems"
if not sWord:
return []
if sWord not in _dAnalyses and not _storeMorphFromFSA(sWord):
return []
return [ s[1:s.find(" ")] for s in _dAnalyses[sWord] ]
## functions to get text outside pattern scope
# warning: check compile_rules.py to understand how it works
def nextword (s, iStart, n):
"get the nth word of the input string or empty string"
m = re.match(u"( +[\\w%-]+){" + str(n-1) + u"} +([\\w%-]+)", s[iStart:])
if not m:
return None
return (iStart+m.start(2), m.group(2))
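# e.g. nextword(u"the quick brown fox", 3, 2) == (10, u"brown")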
def prevword (s, iEnd, n):
"get the (-)nth word of the input string or empty string"
m = re.search(u"([\\w%-]+) +([\\w%-]+ +){" + str(n-1) + u"}$", s[:iEnd])
if not m:
return None
return (m.start(1), m.group(1))
def nextword1 (s, iStart):
"get next word (optimization)"
m = _zNextWord.match(s[iStart:])
if not m:
return None
return (iStart+m.start(1), m.group(1))
def prevword1 (s, iEnd):
"get previous word (optimization)"
m = _zPrevWord.search(s[:iEnd])
if not m:
return None
return (m.start(1), m.group(1))
def look (s, sPattern, sNegPattern=None):
"seek sPattern in s (before/after/fulltext), if sNegPattern not in s"
if sNegPattern and re.search(sNegPattern, s):
return False
if re.search(sPattern, s):
return True
return False
def look_chk1 (dDA, s, nOffset, sPattern, sPatternGroup1, sNegPatternGroup1=None):
"returns True if s has pattern sPattern and m.group(1) has pattern sPatternGroup1"
m = re.search(sPattern, s)
if not m:
return False
try:
sWord = m.group(1)
nPos = m.start(1) + nOffset
except:
#print("Missing group 1")
return False
if sNegPatternGroup1:
return morphex(dDA, (nPos, sWord), sPatternGroup1, sNegPatternGroup1)
return morph(dDA, (nPos, sWord), sPatternGroup1, False)
#### Disambiguator
def select (dDA, nPos, sWord, sPattern, lDefault=None):
if not sWord:
return True
if nPos in dDA:
return True
if sWord not in _dAnalyses and not _storeMorphFromFSA(sWord):
return True
if len(_dAnalyses[sWord]) == 1:
return True
lSelect = [ sMorph for sMorph in _dAnalyses[sWord] if re.search(sPattern, sMorph) ]
if lSelect:
if len(lSelect) != len(_dAnalyses[sWord]):
dDA[nPos] = lSelect
#echo("= "+sWord+" "+str(dDA.get(nPos, "null")))
elif lDefault:
dDA[nPos] = lDefault
#echo("= "+sWord+" "+str(dDA.get(nPos, "null")))
return True
def exclude (dDA, nPos, sWord, sPattern, lDefault=None):
if not sWord:
return True
if nPos in dDA:
return True
if sWord not in _dAnalyses and not _storeMorphFromFSA(sWord):
return True
if len(_dAnalyses[sWord]) == 1:
return True
lSelect = [ sMorph for sMorph in _dAnalyses[sWord] if not re.search(sPattern, sMorph) ]
if lSelect:
if len(lSelect) != len(_dAnalyses[sWord]):
dDA[nPos] = lSelect
#echo("= "+sWord+" "+str(dDA.get(nPos, "null")))
elif lDefault:
dDA[nPos] = lDefault
#echo("= "+sWord+" "+str(dDA.get(nPos, "null")))
return True
def define (dDA, nPos, lMorph):
dDA[nPos] = lMorph
#echo("= "+str(nPos)+" "+str(dDA[nPos]))
return True
#### GRAMMAR CHECKER PLUGINS
${plugins}
${generated}
| SamuelLongchamps/grammalecte | gc_core/py/gc_engine.py | Python | gpl-3.0 | 17,150 |
# Copyright 2019 Virgil Dupras
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
import datetime
from collections import defaultdict
from core.util import dedupe, first as getfirst
from core.trans import tr
from ..model.date import DateFormat
from .base import GUIObject
from .import_table import ImportTable
from .selectable_list import LinkedSelectableList
DAY = 'day'
MONTH = 'month'
YEAR = 'year'
class SwapType:
DayMonth = 0
MonthYear = 1
DayYear = 2
DescriptionPayee = 3
InvertAmount = 4
def last_two_digits(year):
return year - ((year // 100) * 100)
def swapped_date(date, first, second):
attrs = {DAY: date.day, MONTH: date.month, YEAR: last_two_digits(date.year)}
newattrs = {first: attrs[second], second: attrs[first]}
if YEAR in newattrs:
newattrs[YEAR] += 2000
return date.replace(**newattrs)
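# e.g. swapped_date(datetime.date(2019, 3, 5), DAY, MONTH)
# --> datetime.date(2019, 5, 3), with day and month exchanged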
def swap_format_elements(format, first, second):
# format is a DateFormat
swapped = format.copy()
elems = swapped.elements
TYPE2CHAR = {DAY: 'd', MONTH: 'M', YEAR: 'y'}
first_char = TYPE2CHAR[first]
second_char = TYPE2CHAR[second]
first_index = [i for i, x in enumerate(elems) if x.startswith(first_char)][0]
second_index = [i for i, x in enumerate(elems) if x.startswith(second_char)][0]
elems[first_index], elems[second_index] = elems[second_index], elems[first_index]
return swapped
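# e.g. swapping DAY and MONTH turns a "dd/MM/yyyy" format into "MM/dd/yyyy"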
class AccountPane:
def __init__(self, iwin, account, target_account, parsing_date_format):
self.iwin = iwin
self.account = account
self._selected_target = target_account
self.name = account.name
entries = iwin.loader.accounts.entries_for_account(account)
self.count = len(entries)
self.matches = [] # [[ref, imported]]
self.parsing_date_format = parsing_date_format
self.max_day = 31
self.max_month = 12
self.max_year = 99 # 2 digits
self._match_entries()
self._swap_possibilities = set()
self._compute_swap_possibilities()
def _compute_swap_possibilities(self):
entries = list(self.iwin.loader.accounts.entries_for_account(self.account))
if not entries:
return
self._swap_possibilities = set([(DAY, MONTH), (MONTH, YEAR), (DAY, YEAR)])
for first, second in self._swap_possibilities.copy():
for entry in entries:
try:
swapped_date(entry.date, first, second)
except ValueError:
self._swap_possibilities.remove((first, second))
break
def _match_entries(self):
to_import = list(self.iwin.loader.accounts.entries_for_account(self.account))
reference2entry = {}
for entry in (e for e in to_import if e.reference):
reference2entry[entry.reference] = entry
self.matches = []
if self.selected_target is not None:
entries = self.iwin.document.accounts.entries_for_account(self.selected_target)
for entry in entries:
if entry.reference in reference2entry:
other = reference2entry[entry.reference]
if entry.reconciled:
self.iwin.import_table.dont_import.add(other)
to_import.remove(other)
del reference2entry[entry.reference]
else:
other = None
if other is not None or not entry.reconciled:
self.matches.append([entry, other])
self.matches += [[None, entry] for entry in to_import]
self._sort_matches()
def _sort_matches(self):
self.matches.sort(key=lambda t: t[0].date if t[0] is not None else t[1].date)
def bind(self, existing, imported):
[match1] = [m for m in self.matches if m[0] is existing]
[match2] = [m for m in self.matches if m[1] is imported]
assert match1[1] is None
assert match2[0] is None
match1[1] = match2[1]
self.matches.remove(match2)
def can_swap_date_fields(self, first, second): # 'day', 'month', 'year'
return (first, second) in self._swap_possibilities or (second, first) in self._swap_possibilities
def match_entries_by_date_and_amount(self, threshold):
delta = datetime.timedelta(days=threshold)
        # materialized up front because bind() mutates self.matches below
        unmatched = [
            to_import for ref, to_import in self.matches if ref is None]
unmatched_refs = (
ref for ref, to_import in self.matches if to_import is None)
amount2refs = defaultdict(list)
for entry in unmatched_refs:
amount2refs[entry.amount].append(entry)
for entry in unmatched:
if entry.amount not in amount2refs:
continue
potentials = amount2refs[entry.amount]
            for ref in potentials:
                if abs(ref.date - entry.date) <= delta:
                    self.bind(ref, entry)
                    potentials.remove(ref)
                    # an entry can be bound only once; stop before the
                    # mutated list yields another candidate
                    break
self._sort_matches()
def unbind(self, existing, imported):
[match] = [m for m in self.matches if m[0] is existing and m[1] is imported]
match[1] = None
self.matches.append([None, imported])
self._sort_matches()
@property
def selected_target(self):
return self._selected_target
@selected_target.setter
def selected_target(self, value):
self._selected_target = value
self._match_entries()
# This is a modal window that is designed to be re-instantiated on each import
# run. It is shown modally by the UI as soon as its created on the UI side.
class ImportWindow(GUIObject):
# --- View interface
# close()
# close_selected_tab()
# set_swap_button_enabled(enabled: bool)
# update_selected_pane()
# show()
#
def __init__(self, mainwindow, target_account=None):
super().__init__()
if not hasattr(mainwindow, 'loader'):
raise ValueError("Nothing to import!")
self.mainwindow = mainwindow
self.document = mainwindow.document
self.app = self.document.app
self._selected_pane_index = 0
self._selected_target_index = 0
def setfunc(index):
self.view.set_swap_button_enabled(self.can_perform_swap())
self.swap_type_list = LinkedSelectableList(items=[
"<placeholder> Day <--> Month",
"<placeholder> Month <--> Year",
"<placeholder> Day <--> Year",
tr("Description <--> Payee"),
tr("Invert Amounts"),
], setfunc=setfunc)
self.swap_type_list.selected_index = SwapType.DayMonth
self.panes = []
self.import_table = ImportTable(self)
self.loader = self.mainwindow.loader
self.target_accounts = [
a for a in self.document.accounts if a.is_balance_sheet_account()]
self.target_accounts.sort(key=lambda a: a.name.lower())
accounts = []
for account in self.loader.accounts:
if account.is_balance_sheet_account():
entries = self.loader.accounts.entries_for_account(account)
if len(entries):
new_name = self.document.accounts.new_name(account.name)
if new_name != account.name:
self.loader.accounts.rename_account(account, new_name)
accounts.append(account)
parsing_date_format = DateFormat.from_sysformat(self.loader.parsing_date_format)
for account in accounts:
target = target_account
if target is None and account.reference:
target = getfirst(
t for t in self.target_accounts if t.reference == account.reference
)
self.panes.append(
AccountPane(self, account, target, parsing_date_format))
# --- Private
def _can_swap_date_fields(self, first, second): # 'day', 'month', 'year'
pane = self.selected_pane
if pane is None:
return False
return pane.can_swap_date_fields(first, second)
def _invert_amounts(self, apply_to_all):
if apply_to_all:
panes = self.panes
else:
panes = [self.selected_pane]
for pane in panes:
entries = self.loader.accounts.entries_for_account(pane.account)
txns = dedupe(e.transaction for e in entries)
for txn in txns:
for split in txn.splits:
split.amount = -split.amount
self.import_table.refresh()
def _refresh_target_selection(self):
if not self.panes:
return
target = self.selected_pane.selected_target
self._selected_target_index = 0
if target is not None:
try:
self._selected_target_index = self.target_accounts.index(target) + 1
except ValueError:
pass
def _refresh_swap_list_items(self):
if not self.panes:
return
items = []
basefmt = self.selected_pane.parsing_date_format
for first, second in [(DAY, MONTH), (MONTH, YEAR), (DAY, YEAR)]:
swapped = swap_format_elements(basefmt, first, second)
items.append("{} --> {}".format(basefmt.iso_format, swapped.iso_format))
self.swap_type_list[:3] = items
def _swap_date_fields(self, first, second, apply_to_all): # 'day', 'month', 'year'
assert self._can_swap_date_fields(first, second)
if apply_to_all:
panes = [p for p in self.panes if p.can_swap_date_fields(first, second)]
else:
panes = [self.selected_pane]
def switch_func(txn):
txn.date = swapped_date(txn.date, first, second)
self._swap_fields(panes, switch_func)
# Now, lets' change the date format on these panes
for pane in panes:
            basefmt = pane.parsing_date_format
swapped = swap_format_elements(basefmt, first, second)
pane.parsing_date_format = swapped
pane._sort_matches()
self.import_table.refresh()
self._refresh_swap_list_items()
def _swap_description_payee(self, apply_to_all):
if apply_to_all:
panes = self.panes
else:
panes = [self.selected_pane]
def switch_func(txn):
txn.description, txn.payee = txn.payee, txn.description
self._swap_fields(panes, switch_func)
def _swap_fields(self, panes, switch_func):
seen = set()
for pane in panes:
entries = self.loader.accounts.entries_for_account(pane.account)
txns = dedupe(e.transaction for e in entries)
for txn in txns:
if txn.affected_accounts() & seen:
# We've already swapped this txn in a previous pane.
continue
switch_func(txn)
seen.add(pane.account)
self.import_table.refresh()
def _update_selected_pane(self):
self.import_table.refresh()
self._refresh_swap_list_items()
self.view.update_selected_pane()
self.view.set_swap_button_enabled(self.can_perform_swap())
# --- Override
def _view_updated(self):
if self.document.can_restore_from_prefs():
self.restore_view()
        # XXX Logically, we should call _update_selected_pane(), but doing
        # so makes tests fail. To investigate.
self._refresh_target_selection()
self.view.update_selected_pane()
self._refresh_swap_list_items()
self.import_table.refresh()
# --- Public
def can_perform_swap(self):
index = self.swap_type_list.selected_index
if index == SwapType.DayMonth:
return self._can_swap_date_fields(DAY, MONTH)
elif index == SwapType.MonthYear:
return self._can_swap_date_fields(MONTH, YEAR)
elif index == SwapType.DayYear:
return self._can_swap_date_fields(DAY, YEAR)
else:
return True
def close_pane(self, index):
was_selected = index == self.selected_pane_index
del self.panes[index]
if not self.panes:
self.view.close()
return
self._selected_pane_index = min(self._selected_pane_index, len(self.panes) - 1)
if was_selected:
self._update_selected_pane()
def import_selected_pane(self):
pane = self.selected_pane
matches = pane.matches
matches = [
(e, ref) for ref, e in matches
if e is not None and e not in self.import_table.dont_import]
if pane.selected_target is not None:
# We import in an existing account, adjust all the transactions accordingly
target_account = pane.selected_target
else:
target_account = None
self.document.import_entries(target_account, pane.account, matches)
self.mainwindow.revalidate()
self.close_pane(self.selected_pane_index)
self.view.close_selected_tab()
def match_entries_by_date_and_amount(self, threshold):
self.selected_pane.match_entries_by_date_and_amount(threshold)
self.import_table.refresh()
def perform_swap(self, apply_to_all=False):
index = self.swap_type_list.selected_index
if index == SwapType.DayMonth:
self._swap_date_fields(DAY, MONTH, apply_to_all=apply_to_all)
elif index == SwapType.MonthYear:
self._swap_date_fields(MONTH, YEAR, apply_to_all=apply_to_all)
elif index == SwapType.DayYear:
self._swap_date_fields(DAY, YEAR, apply_to_all=apply_to_all)
elif index == SwapType.DescriptionPayee:
self._swap_description_payee(apply_to_all=apply_to_all)
elif index == SwapType.InvertAmount:
self._invert_amounts(apply_to_all=apply_to_all)
def restore_view(self):
self.import_table.columns.restore_columns()
# --- Properties
@property
def selected_pane(self):
return self.panes[self.selected_pane_index] if self.panes else None
@property
def selected_pane_index(self):
return self._selected_pane_index
@selected_pane_index.setter
def selected_pane_index(self, value):
if value >= len(self.panes):
return
self._selected_pane_index = value
self._refresh_target_selection()
self._update_selected_pane()
@property
def selected_target_account(self):
return self.selected_pane.selected_target
@property
def selected_target_account_index(self):
return self._selected_target_index
@selected_target_account_index.setter
def selected_target_account_index(self, value):
target = self.target_accounts[value - 1] if value > 0 else None
self.selected_pane.selected_target = target
self._selected_target_index = value
self.import_table.refresh()
@property
def target_account_names(self):
return [tr('< New Account >')] + [a.name for a in self.target_accounts]
| hsoft/moneyguru | core/gui/import_window.py | Python | gpl-3.0 | 15,326 |
#!/usr/bin/env python
# coding=utf-8
"""30. Digit fifth powers
https://projecteuler.net/problem=30
Surprisingly there are only three numbers that can be written as the sum of
fourth powers of their digits:
> 1634 = 1^4 + 6^4 + 3^4 + 4^4
> 8208 = 8^4 + 2^4 + 0^4 + 8^4
> 9474 = 9^4 + 4^4 + 7^4 + 4^4
As 1 = 1^4 is not a sum it is not included.
The sum of these numbers is 1634 + 8208 + 9474 = 19316.
Find the sum of all the numbers that can be written as the sum of fifth powers
of their digits.
"""
| openqt/algorithms | projecteuler/pe030-digit-fifth-powers.py | Python | gpl-3.0 | 507 |
import sys, math
from test import goertzel
import wave
import pyaudio
import Queue
import numpy as np
if len(sys.argv) < 2:
print "Usage: %s <filename> " % sys.argv[0]
sys.exit(1)
filename = sys.argv[1]
w = wave.open(filename)
fs = w.getframerate()
width = w.getsampwidth()
chunkDuration = .2 #.2 second chunks
chunk = int(chunkDuration*fs)
window = np.blackman(chunk)
p = pyaudio.PyAudio()
stream = p.open(format = p.get_format_from_width(w.getsampwidth()), channels = w.getnchannels(),rate = fs, output=True)
#read .2 second chunk
data = w.readframes(chunk)
chunk_data = []
#find the frequencies of each chunk
print "Running calculations on wav file"
num = 0
while data != '':
print "Calculating Chunk " + str(num)
stream.write(data)
indata = np.array(wave.struct.unpack("%dh"%(len(data)/width),\
data))
freqs , results = goertzel(indata,fs, (1036,1058), (1567,1569), (2082,2104))
chunk_data.append((freqs,results))
data = w.readframes(chunk)
    num += 1
stream.close()
p.terminate()
#finished getting data from chunks, now to parse the data
hi = []
lo = []
mid = []
#average first second of audio to get frequency baselines
for i in range (5):
a = chunk_data[i][0]
b = chunk_data[i][1]
for j in range(len(a)):
if a[j] > 1700:
hi.append(b[j])
elif a[j] < 1300:
lo.append(b[j])
else:
mid.append(b[j])
hi_average = sum(hi)/float(len(hi))
lo_average = sum(lo)/float(len(lo))
mid_average = sum(mid)/float(len(mid))
"""
Determine the frequency in each .2 second chunk that has the highest amplitude increase from its average, then determine the frequency
of that second of data by the median frequency of its 5 chunks
"""
#looks for start signal in last 3 seconds of audio
def signal_found(arr):
lst = arr[-15:]
first = 0
second = 0
third = 0
for i in range(0,5):
if lst[i]=="mid":
first += 1
for i in range(5,10):
if lst[i]=="mid":
second += 1
for i in range(10,15):
if lst[i]=="mid":
third += 1
if first >= 5 and second >= 5 and third >= 5:
return True
else:
return False
#gets freq of 1 second of audio
def get_freq(arr):
lo_count = 0
hi_count = 0
mid_count = 0
for i in arr:
if i=="lo":
lo_count+=1
if i=="hi":
hi_count+=1
if i=="mid":
mid_count+=1
if mid_count > hi_count and mid_count > lo_count:
return 2
if lo_count>hi_count:
return 0
else:
return 1
start = False
freq_list = []
offset = 0
bits = []
for i in range(5,len(chunk_data)):
a = chunk_data[i][0]
b = chunk_data[i][1]
hi_amp = []
lo_amp = []
mid_amp = []
#get averages for each freq
for j in range(len(a)):
if a[j] > 1700:
hi_amp.append(b[j])
elif a[j] < 1300:
lo_amp.append(b[j])
else:
mid_amp.append(b[j])
hi_av = sum(hi_amp)/float(len(hi_amp))
lo_av = sum(lo_amp)/float(len(lo_amp))
mid_av = sum(mid_amp)/float(len(mid_amp))
#get freq of this chunk
diff = [lo_av-lo_average,mid_av-mid_average,hi_av-hi_average]
index = diff.index(max(diff))
if(index==0):
freq_list.append("lo")
if(index==1):
freq_list.append("mid")
if(index==2):
freq_list.append("hi")
print(freq_list[len(freq_list)-1])
if len(freq_list) > 5:
if start:
if len(freq_list)%5 == offset:
bit = get_freq(freq_list[-5:])
if bit != 2:
bits.append(bit)
else:
print "Stop Signal Detected"
break
elif len(freq_list) >= 15:
if signal_found(freq_list):
print "signal found"
start = True
offset = len(freq_list)%5
print bits
| jloloew/AirBridge | parse.py | Python | gpl-3.0 | 3,509 |
from controllers.job_ctrl import JobController
from models.job_model import JobModel
from views.job_view import JobView
class MainController(object):
def __init__(self, main_model):
self.main_view = None
self.main_model = main_model
self.main_model.begin_job_fetch.connect(self.on_begin_job_fetch)
self.main_model.update_job_fetch_progress.connect(self.on_job_fetch_update)
self.main_model.fetched_job.connect(self.on_fetched_job)
def init_ui(self, main_view):
self.main_view = main_view
self.init_hotkeys()
def init_hotkeys(self):
self.main_model.hotkey_model.add_hotkey(["Lcontrol", "Lmenu", "J"], self.main_view.focus_job_num_edit)
self.main_model.hotkey_model.add_hotkey(["Lcontrol", "Lmenu", "O"], self.main_view.open_current_job_folder)
self.main_model.hotkey_model.add_hotkey(["Lcontrol", "Lmenu", "B"], self.main_view.open_current_job_basecamp)
self.main_model.hotkey_model.start_detection()
def fetch_job(self):
job_num = self.main_view.job_num
if self.main_model.job_exists(job_num):
self.main_view.show_job_already_exists_dialog()
return
self.main_model.fetch_job(job_num)
def cancel_job_fetch(self):
self.main_model.cancel_job_fetch()
def on_begin_job_fetch(self, max):
self.main_view.show_job_fetch_progress_dialog(max)
def on_job_fetch_update(self, progress):
self.main_view.update_job_fetch_progress_dialog(progress)
def on_fetched_job(self, job_num, base_folder):
job = JobModel(job_num,
base_folder,
self.main_model.settings_model.basecamp_email,
self.main_model.settings_model.basecamp_password,
self.main_model.settings_model.google_maps_js_api_key,
self.main_model.settings_model.google_maps_static_api_key,
self.main_model.settings_model.google_earth_exe_path,
self.main_model.settings_model.scene_exe_path)
self.main_model.jobs[job.job_num] = job
found = bool(job.base_folder)
self.main_view.close_job_fetch_progress_dialog()
if not found:
open_anyway = self.main_view.show_job_not_found_dialog()
if not open_anyway:
return
job_view = JobView(JobController(job))
job_view.request_minimize.connect(self.main_view.close)
self.main_view.add_tab(job_view, job.job_name)
def remove_job(self, index):
job_num = int(self.main_view.ui.jobs_tab_widget.tabText(index)[1:])
self.main_model.jobs.pop(job_num, None)
self.main_view.remove_tab(index)
| redline-forensics/auto-dm | controllers/main_ctrl.py | Python | gpl-3.0 | 2,757 |
#!/usr/bin/python
#
# Problem: Making Chess Boards
# Language: Python
# Author: KirarinSnow
# Usage: python thisfile.py <input.in >output.out
from heapq import *
def process(r1, r2, c1, c2):
for i in range(r1, r2):
for j in range(c1, c2):
if 0 <= i < m and 0 <= j < n:
if g[i][j] == None:
s[i][j] = 0
elif i == 0 or j == 0:
s[i][j] = 1
elif g[i-1][j] != g[i][j] and g[i][j-1] != g[i][j] and \
g[i-1][j-1] == g[i][j]:
s[i][j] = 1 + min(s[i-1][j], s[i][j-1], s[i-1][j-1])
else:
s[i][j] = 1
heappush(q, (-s[i][j], i, j))
def clear(r1, r2, c1, c2):
for i in range(r1, r2):
for j in range(c1, c2):
if 0 <= i < m and 0 <= j < n:
g[i][j] = None
for case in range(int(raw_input())):
m, n = map(int, raw_input().split())
    v = [int(raw_input(), 16) for i in range(m)]
g = map(lambda x: map(lambda y: (x>>y)%2, range(n)[::-1]), v)
s = [[1 for i in range(n)] for j in range(m)]
q = []
process(0, m, 0, n)
b = []
while q:
x, r, c = heappop(q)
if x != 0 and s[r][c] == -x:
b.append((-x, r, c))
clear(r+x+1, r+1, c+x+1, c+1)
process(r+x+1, r-x+1, c+x+1, c-x+1)
vs = sorted(list(set(map(lambda x: x[0], b))))[::-1]
print "Case #%d: %d" % (case+1, len(vs))
for k in vs:
print k, len(filter(lambda x: x[0] == k, b))
| KirarinSnow/Google-Code-Jam | Round 1C 2010/C.py | Python | gpl-3.0 | 1,627 |
from .gaussian_process import RandomFeatureGaussianProcess, mean_field_logits
from .spectral_normalization import SpectralNormalization
| gagnonlg/explore-ml | sngp/tf_import/__init__.py | Python | gpl-3.0 | 136 |
import unittest
from test import support
import os
import io
import socket
import urllib.request
from urllib.request import Request, OpenerDirector
# XXX
# Request
# CacheFTPHandler (hard to write)
# parse_keqv_list, parse_http_list, HTTPDigestAuthHandler
class TrivialTests(unittest.TestCase):
def test_trivial(self):
# A couple trivial tests
self.assertRaises(ValueError, urllib.request.urlopen, 'bogus url')
# XXX Name hacking to get this to work on Windows.
fname = os.path.abspath(urllib.request.__file__).replace('\\', '/')
# And more hacking to get it to work on MacOS. This assumes
# urllib.pathname2url works, unfortunately...
if os.name == 'mac':
fname = '/' + fname.replace(':', '/')
if os.name == 'nt':
file_url = "file:///%s" % fname
else:
file_url = "file://%s" % fname
f = urllib.request.urlopen(file_url)
buf = f.read()
f.close()
def test_parse_http_list(self):
tests = [
('a,b,c', ['a', 'b', 'c']),
('path"o,l"og"i"cal, example', ['path"o,l"og"i"cal', 'example']),
('a, b, "c", "d", "e,f", g, h',
['a', 'b', '"c"', '"d"', '"e,f"', 'g', 'h']),
('a="b\\"c", d="e\\,f", g="h\\\\i"',
['a="b"c"', 'd="e,f"', 'g="h\\i"'])]
for string, list in tests:
self.assertEqual(urllib.request.parse_http_list(string), list)
def test_request_headers_dict():
"""
The Request.headers dictionary is not a documented interface. It should
stay that way, because the complete set of headers are only accessible
through the .get_header(), .has_header(), .header_items() interface.
However, .headers pre-dates those methods, and so real code will be using
the dictionary.
The introduction in 2.4 of those methods was a mistake for the same reason:
code that previously saw all (urllib2 user)-provided headers in .headers
now sees only a subset (and the function interface is ugly and incomplete).
A better change would have been to replace .headers dict with a dict
subclass (or UserDict.DictMixin instance?) that preserved the .headers
interface and also provided access to the "unredirected" headers. It's
probably too late to fix that, though.
Check .capitalize() case normalization:
>>> url = "http://example.com"
>>> Request(url, headers={"Spam-eggs": "blah"}).headers["Spam-eggs"]
'blah'
>>> Request(url, headers={"spam-EggS": "blah"}).headers["Spam-eggs"]
'blah'
Currently, Request(url, "Spam-eggs").headers["Spam-Eggs"] raises KeyError,
but that could be changed in future.
"""
def test_request_headers_methods():
"""
Note the case normalization of header names here, to .capitalize()-case.
This should be preserved for backwards-compatibility. (In the HTTP case,
normalization to .title()-case is done by urllib2 before sending headers to
http.client).
>>> url = "http://example.com"
>>> r = Request(url, headers={"Spam-eggs": "blah"})
>>> r.has_header("Spam-eggs")
True
>>> r.header_items()
[('Spam-eggs', 'blah')]
>>> r.add_header("Foo-Bar", "baz")
>>> items = sorted(r.header_items())
>>> items
[('Foo-bar', 'baz'), ('Spam-eggs', 'blah')]
Note that e.g. r.has_header("spam-EggS") is currently False, and
r.get_header("spam-EggS") returns None, but that could be changed in
future.
>>> r.has_header("Not-there")
False
>>> print(r.get_header("Not-there"))
None
>>> r.get_header("Not-there", "default")
'default'
"""
def test_password_manager(self):
"""
>>> mgr = urllib.request.HTTPPasswordMgr()
>>> add = mgr.add_password
>>> add("Some Realm", "http://example.com/", "joe", "password")
>>> add("Some Realm", "http://example.com/ni", "ni", "ni")
>>> add("c", "http://example.com/foo", "foo", "ni")
>>> add("c", "http://example.com/bar", "bar", "nini")
>>> add("b", "http://example.com/", "first", "blah")
>>> add("b", "http://example.com/", "second", "spam")
>>> add("a", "http://example.com", "1", "a")
>>> add("Some Realm", "http://c.example.com:3128", "3", "c")
>>> add("Some Realm", "d.example.com", "4", "d")
>>> add("Some Realm", "e.example.com:3128", "5", "e")
>>> mgr.find_user_password("Some Realm", "example.com")
('joe', 'password')
>>> mgr.find_user_password("Some Realm", "http://example.com")
('joe', 'password')
>>> mgr.find_user_password("Some Realm", "http://example.com/")
('joe', 'password')
>>> mgr.find_user_password("Some Realm", "http://example.com/spam")
('joe', 'password')
>>> mgr.find_user_password("Some Realm", "http://example.com/spam/spam")
('joe', 'password')
>>> mgr.find_user_password("c", "http://example.com/foo")
('foo', 'ni')
>>> mgr.find_user_password("c", "http://example.com/bar")
('bar', 'nini')
Actually, this is really undefined ATM
## Currently, we use the highest-level path where more than one match:
## >>> mgr.find_user_password("Some Realm", "http://example.com/ni")
## ('joe', 'password')
Use latest add_password() in case of conflict:
>>> mgr.find_user_password("b", "http://example.com/")
('second', 'spam')
No special relationship between a.example.com and example.com:
>>> mgr.find_user_password("a", "http://example.com/")
('1', 'a')
>>> mgr.find_user_password("a", "http://a.example.com/")
(None, None)
Ports:
>>> mgr.find_user_password("Some Realm", "c.example.com")
(None, None)
>>> mgr.find_user_password("Some Realm", "c.example.com:3128")
('3', 'c')
>>> mgr.find_user_password("Some Realm", "http://c.example.com:3128")
('3', 'c')
>>> mgr.find_user_password("Some Realm", "d.example.com")
('4', 'd')
>>> mgr.find_user_password("Some Realm", "e.example.com:3128")
('5', 'e')
"""
pass
def test_password_manager_default_port(self):
"""
>>> mgr = urllib.request.HTTPPasswordMgr()
>>> add = mgr.add_password
The point to note here is that we can't guess the default port if there's
no scheme. This applies to both add_password and find_user_password.
>>> add("f", "http://g.example.com:80", "10", "j")
>>> add("g", "http://h.example.com", "11", "k")
>>> add("h", "i.example.com:80", "12", "l")
>>> add("i", "j.example.com", "13", "m")
>>> mgr.find_user_password("f", "g.example.com:100")
(None, None)
>>> mgr.find_user_password("f", "g.example.com:80")
('10', 'j')
>>> mgr.find_user_password("f", "g.example.com")
(None, None)
>>> mgr.find_user_password("f", "http://g.example.com:100")
(None, None)
>>> mgr.find_user_password("f", "http://g.example.com:80")
('10', 'j')
>>> mgr.find_user_password("f", "http://g.example.com")
('10', 'j')
>>> mgr.find_user_password("g", "h.example.com")
('11', 'k')
>>> mgr.find_user_password("g", "h.example.com:80")
('11', 'k')
>>> mgr.find_user_password("g", "http://h.example.com:80")
('11', 'k')
>>> mgr.find_user_password("h", "i.example.com")
(None, None)
>>> mgr.find_user_password("h", "i.example.com:80")
('12', 'l')
>>> mgr.find_user_password("h", "http://i.example.com:80")
('12', 'l')
>>> mgr.find_user_password("i", "j.example.com")
('13', 'm')
>>> mgr.find_user_password("i", "j.example.com:80")
(None, None)
>>> mgr.find_user_password("i", "http://j.example.com")
('13', 'm')
>>> mgr.find_user_password("i", "http://j.example.com:80")
(None, None)
"""
class MockOpener:
addheaders = []
def open(self, req, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.req, self.data, self.timeout = req, data, timeout
def error(self, proto, *args):
self.proto, self.args = proto, args
class MockFile:
def read(self, count=None): pass
def readline(self, count=None): pass
def close(self): pass
class MockHeaders(dict):
def getheaders(self, name):
return list(self.values())
class MockResponse(io.StringIO):
def __init__(self, code, msg, headers, data, url=None):
io.StringIO.__init__(self, data)
self.code, self.msg, self.headers, self.url = code, msg, headers, url
def info(self):
return self.headers
def geturl(self):
return self.url
class MockCookieJar:
def add_cookie_header(self, request):
self.ach_req = request
def extract_cookies(self, response, request):
self.ec_req, self.ec_r = request, response
class FakeMethod:
def __init__(self, meth_name, action, handle):
self.meth_name = meth_name
self.handle = handle
self.action = action
def __call__(self, *args):
return self.handle(self.meth_name, self.action, *args)
class MockHTTPResponse(io.IOBase):
def __init__(self, fp, msg, status, reason):
self.fp = fp
self.msg = msg
self.status = status
self.reason = reason
self.code = 200
def read(self):
return ''
def info(self):
return {}
def geturl(self):
return self.url
class MockHTTPClass:
def __init__(self):
self.level = 0
self.req_headers = []
self.data = None
self.raise_on_endheaders = False
self._tunnel_headers = {}
def __call__(self, host, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.host = host
self.timeout = timeout
return self
def set_debuglevel(self, level):
self.level = level
def _set_tunnel(self, host, port=None, headers=None):
self._tunnel_host = host
self._tunnel_port = port
if headers:
self._tunnel_headers = headers
else:
self._tunnel_headers.clear()
def request(self, method, url, body=None, headers=None):
self.method = method
self.selector = url
if headers is not None:
self.req_headers += headers.items()
self.req_headers.sort()
if body:
self.data = body
if self.raise_on_endheaders:
import socket
raise socket.error()
def getresponse(self):
return MockHTTPResponse(MockFile(), {}, 200, "OK")
class MockHandler:
# useful for testing handler machinery
# see add_ordered_mock_handlers() docstring
handler_order = 500
def __init__(self, methods):
self._define_methods(methods)
def _define_methods(self, methods):
for spec in methods:
if len(spec) == 2: name, action = spec
else: name, action = spec, None
meth = FakeMethod(name, action, self.handle)
setattr(self.__class__, name, meth)
def handle(self, fn_name, action, *args, **kwds):
self.parent.calls.append((self, fn_name, args, kwds))
if action is None:
return None
elif action == "return self":
return self
elif action == "return response":
res = MockResponse(200, "OK", {}, "")
return res
elif action == "return request":
return Request("http://blah/")
elif action.startswith("error"):
code = action[action.rfind(" ")+1:]
try:
code = int(code)
except ValueError:
pass
res = MockResponse(200, "OK", {}, "")
return self.parent.error("http", args[0], res, code, "", {})
elif action == "raise":
raise urllib.error.URLError("blah")
assert False
def close(self): pass
def add_parent(self, parent):
self.parent = parent
self.parent.calls = []
def __lt__(self, other):
if not hasattr(other, "handler_order"):
# No handler_order, leave in original order. Yuck.
return True
return self.handler_order < other.handler_order
def add_ordered_mock_handlers(opener, meth_spec):
"""Create MockHandlers and add them to an OpenerDirector.
meth_spec: list of lists of tuples and strings defining methods to define
on handlers. eg:
[["http_error", "ftp_open"], ["http_open"]]
defines methods .http_error() and .ftp_open() on one handler, and
.http_open() on another. These methods just record their arguments and
return None. Using a tuple instead of a string causes the method to
perform some action (see MockHandler.handle()), eg:
[["http_error"], [("http_open", "return request")]]
defines .http_error() on one handler (which simply returns None), and
.http_open() on another handler, which returns a Request object.
"""
handlers = []
count = 0
for meths in meth_spec:
class MockHandlerSubclass(MockHandler): pass
h = MockHandlerSubclass(meths)
h.handler_order += count
h.add_parent(opener)
count = count + 1
handlers.append(h)
opener.add_handler(h)
return handlers
def build_test_opener(*handler_instances):
opener = OpenerDirector()
for h in handler_instances:
opener.add_handler(h)
return opener
class MockHTTPHandler(urllib.request.BaseHandler):
# useful for testing redirections and auth
# sends supplied headers and code as first response
# sends 200 OK as second response
def __init__(self, code, headers):
self.code = code
self.headers = headers
self.reset()
def reset(self):
self._count = 0
self.requests = []
def http_open(self, req):
import email, http.client, copy
from io import StringIO
self.requests.append(copy.deepcopy(req))
if self._count == 0:
self._count = self._count + 1
name = http.client.responses[self.code]
msg = email.message_from_string(self.headers)
return self.parent.error(
"http", req, MockFile(), self.code, name, msg)
else:
self.req = req
msg = email.message_from_string("\r\n\r\n")
return MockResponse(200, "OK", msg, "", req.get_full_url())
class MockHTTPSHandler(urllib.request.AbstractHTTPHandler):
# Useful for testing the Proxy-Authorization request by verifying the
# properties of httpcon
def __init__(self):
urllib.request.AbstractHTTPHandler.__init__(self)
self.httpconn = MockHTTPClass()
def https_open(self, req):
return self.do_open(self.httpconn, req)
class MockPasswordManager:
def add_password(self, realm, uri, user, password):
self.realm = realm
self.url = uri
self.user = user
self.password = password
def find_user_password(self, realm, authuri):
self.target_realm = realm
self.target_url = authuri
return self.user, self.password
class OpenerDirectorTests(unittest.TestCase):
def test_add_non_handler(self):
class NonHandler(object):
pass
self.assertRaises(TypeError,
OpenerDirector().add_handler, NonHandler())
def test_badly_named_methods(self):
# test work-around for three methods that accidentally follow the
# naming conventions for handler methods
# (*_open() / *_request() / *_response())
# These used to call the accidentally-named methods, causing a
# TypeError in real code; here, returning self from these mock
# methods would either cause no exception, or AttributeError.
from urllib.error import URLError
o = OpenerDirector()
meth_spec = [
[("do_open", "return self"), ("proxy_open", "return self")],
[("redirect_request", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
o.add_handler(urllib.request.UnknownHandler())
for scheme in "do", "proxy", "redirect":
self.assertRaises(URLError, o.open, scheme+"://example.com/")
def test_handled(self):
# handler returning non-None means no more handlers will be called
o = OpenerDirector()
meth_spec = [
["http_open", "ftp_open", "http_error_302"],
["ftp_open"],
[("http_open", "return self")],
[("http_open", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
r = o.open(req)
# Second .http_open() gets called, third doesn't, since second returned
# non-None. Handlers without .http_open() never get any methods called
# on them.
# In fact, second mock handler defining .http_open() returns self
# (instead of response), which becomes the OpenerDirector's return
# value.
self.assertEqual(r, handlers[2])
calls = [(handlers[0], "http_open"), (handlers[2], "http_open")]
for expected, got in zip(calls, o.calls):
handler, name, args, kwds = got
self.assertEqual((handler, name), expected)
self.assertEqual(args, (req,))
def test_handler_order(self):
o = OpenerDirector()
handlers = []
for meths, handler_order in [
([("http_open", "return self")], 500),
(["http_open"], 0),
]:
class MockHandlerSubclass(MockHandler): pass
h = MockHandlerSubclass(meths)
h.handler_order = handler_order
handlers.append(h)
o.add_handler(h)
r = o.open("http://example.com/")
# handlers called in reverse order, thanks to their sort order
self.assertEqual(o.calls[0][0], handlers[1])
self.assertEqual(o.calls[1][0], handlers[0])
def test_raise(self):
# raising URLError stops processing of request
o = OpenerDirector()
meth_spec = [
[("http_open", "raise")],
[("http_open", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
self.assertRaises(urllib.error.URLError, o.open, req)
self.assertEqual(o.calls, [(handlers[0], "http_open", (req,), {})])
## def test_error(self):
## # XXX this doesn't actually seem to be used in standard library,
## # but should really be tested anyway...
def test_http_error(self):
# XXX http_error_default
# http errors are a special case
o = OpenerDirector()
meth_spec = [
[("http_open", "error 302")],
[("http_error_400", "raise"), "http_open"],
[("http_error_302", "return response"), "http_error_303",
"http_error"],
[("http_error_302")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
class Unknown:
def __eq__(self, other): return True
req = Request("http://example.com/")
r = o.open(req)
assert len(o.calls) == 2
calls = [(handlers[0], "http_open", (req,)),
(handlers[2], "http_error_302",
(req, Unknown(), 302, "", {}))]
for expected, got in zip(calls, o.calls):
handler, method_name, args = expected
self.assertEqual((handler, method_name), got[:2])
self.assertEqual(args, got[2])
def test_processors(self):
# *_request / *_response methods get called appropriately
o = OpenerDirector()
meth_spec = [
[("http_request", "return request"),
("http_response", "return response")],
[("http_request", "return request"),
("http_response", "return response")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
r = o.open(req)
# processor methods are called on *all* handlers that define them,
# not just the first handler that handles the request
calls = [
(handlers[0], "http_request"), (handlers[1], "http_request"),
(handlers[0], "http_response"), (handlers[1], "http_response")]
for i, (handler, name, args, kwds) in enumerate(o.calls):
if i < 2:
# *_request
self.assertEqual((handler, name), calls[i])
self.assertEqual(len(args), 1)
self.assertTrue(isinstance(args[0], Request))
else:
# *_response
self.assertEqual((handler, name), calls[i])
self.assertEqual(len(args), 2)
self.assertTrue(isinstance(args[0], Request))
# response from opener.open is None, because there's no
# handler that defines http_open to handle it
self.assertTrue(args[1] is None or
isinstance(args[1], MockResponse))
def sanepathname2url(path):
urlpath = urllib.request.pathname2url(path)
if os.name == "nt" and urlpath.startswith("///"):
urlpath = urlpath[2:]
# XXX don't ask me about the mac...
return urlpath
class HandlerTests(unittest.TestCase):
def test_ftp(self):
class MockFTPWrapper:
def __init__(self, data): self.data = data
def retrfile(self, filename, filetype):
self.filename, self.filetype = filename, filetype
return io.StringIO(self.data), len(self.data)
class NullFTPHandler(urllib.request.FTPHandler):
def __init__(self, data): self.data = data
def connect_ftp(self, user, passwd, host, port, dirs,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.user, self.passwd = user, passwd
self.host, self.port = host, port
self.dirs = dirs
self.ftpwrapper = MockFTPWrapper(self.data)
return self.ftpwrapper
import ftplib
data = "rheum rhaponicum"
h = NullFTPHandler(data)
o = h.parent = MockOpener()
for url, host, port, user, passwd, type_, dirs, filename, mimetype in [
("ftp://localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://parrot@localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "parrot", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://%25parrot@localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "%parrot", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://%2542parrot@localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "%42parrot", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://localhost:80/foo/bar/",
"localhost", 80, "", "", "D",
["foo", "bar"], "", None),
("ftp://localhost/baz.gif;type=a",
"localhost", ftplib.FTP_PORT, "", "", "A",
[], "baz.gif", None), # XXX really this should guess image/gif
]:
req = Request(url)
req.timeout = None
r = h.ftp_open(req)
# ftp authentication not yet implemented by FTPHandler
self.assertEqual(h.user, user)
self.assertEqual(h.passwd, passwd)
self.assertEqual(h.host, socket.gethostbyname(host))
self.assertEqual(h.port, port)
self.assertEqual(h.dirs, dirs)
self.assertEqual(h.ftpwrapper.filename, filename)
self.assertEqual(h.ftpwrapper.filetype, type_)
headers = r.info()
self.assertEqual(headers.get("Content-type"), mimetype)
self.assertEqual(int(headers["Content-length"]), len(data))
def test_file(self):
import email.utils, socket
h = urllib.request.FileHandler()
o = h.parent = MockOpener()
TESTFN = support.TESTFN
urlpath = sanepathname2url(os.path.abspath(TESTFN))
towrite = b"hello, world\n"
urls = [
"file://localhost%s" % urlpath,
"file://%s" % urlpath,
"file://%s%s" % (socket.gethostbyname('localhost'), urlpath),
]
try:
localaddr = socket.gethostbyname(socket.gethostname())
except socket.gaierror:
localaddr = ''
if localaddr:
urls.append("file://%s%s" % (localaddr, urlpath))
for url in urls:
f = open(TESTFN, "wb")
try:
try:
f.write(towrite)
finally:
f.close()
r = h.file_open(Request(url))
try:
data = r.read()
headers = r.info()
respurl = r.geturl()
finally:
r.close()
stats = os.stat(TESTFN)
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
finally:
os.remove(TESTFN)
self.assertEqual(data, towrite)
self.assertEqual(headers["Content-type"], "text/plain")
self.assertEqual(headers["Content-length"], "13")
self.assertEqual(headers["Last-modified"], modified)
self.assertEqual(respurl, url)
for url in [
"file://localhost:80%s" % urlpath,
"file:///file_does_not_exist.txt",
"file://%s:80%s/%s" % (socket.gethostbyname('localhost'),
os.getcwd(), TESTFN),
"file://somerandomhost.ontheinternet.com%s/%s" %
(os.getcwd(), TESTFN),
]:
try:
f = open(TESTFN, "wb")
try:
f.write(towrite)
finally:
f.close()
self.assertRaises(urllib.error.URLError,
h.file_open, Request(url))
finally:
os.remove(TESTFN)
h = urllib.request.FileHandler()
o = h.parent = MockOpener()
# XXXX why does // mean ftp (and /// mean not ftp!), and where
# is file: scheme specified? I think this is really a bug, and
# what was intended was to distinguish between URLs like:
# file:/blah.txt (a file)
# file://localhost/blah.txt (a file)
# file:///blah.txt (a file)
# file://ftp.example.com/blah.txt (an ftp URL)
for url, ftp in [
("file://ftp.example.com//foo.txt", True),
("file://ftp.example.com///foo.txt", False),
# XXXX bug: fails with OSError, should be URLError
("file://ftp.example.com/foo.txt", False),
("file://somehost//foo/something.txt", True),
("file://localhost//foo/something.txt", False),
]:
req = Request(url)
try:
h.file_open(req)
# XXXX remove OSError when bug fixed
except (urllib.error.URLError, OSError):
self.assertFalse(ftp)
else:
self.assertIs(o.req, req)
self.assertEqual(req.type, "ftp")
                self.assertEqual(req.type == "ftp", ftp)
def test_http(self):
h = urllib.request.AbstractHTTPHandler()
o = h.parent = MockOpener()
url = "http://example.com/"
for method, data in [("GET", None), ("POST", "blah")]:
req = Request(url, data, {"Foo": "bar"})
req.timeout = None
req.add_unredirected_header("Spam", "eggs")
http = MockHTTPClass()
r = h.do_open(http, req)
# result attributes
r.read; r.readline # wrapped MockFile methods
r.info; r.geturl # addinfourl methods
            self.assertEqual((r.code, r.msg), (200, "OK"))  # added from MockHTTPClass.getreply()
hdrs = r.info()
hdrs.get; hdrs.__contains__ # r.info() gives dict from .getreply()
self.assertEqual(r.geturl(), url)
self.assertEqual(http.host, "example.com")
self.assertEqual(http.level, 0)
self.assertEqual(http.method, method)
self.assertEqual(http.selector, "/")
self.assertEqual(http.req_headers,
[("Connection", "close"),
("Foo", "bar"), ("Spam", "eggs")])
self.assertEqual(http.data, data)
# check socket.error converted to URLError
http.raise_on_endheaders = True
self.assertRaises(urllib.error.URLError, h.do_open, http, req)
# check adding of standard headers
o.addheaders = [("Spam", "eggs")]
for data in "", None: # POST, GET
req = Request("http://example.com/", data)
r = MockResponse(200, "OK", {}, "")
newreq = h.do_request_(req)
if data is None: # GET
self.assertTrue("Content-length" not in req.unredirected_hdrs)
self.assertTrue("Content-type" not in req.unredirected_hdrs)
else: # POST
self.assertEqual(req.unredirected_hdrs["Content-length"], "0")
self.assertEqual(req.unredirected_hdrs["Content-type"],
"application/x-www-form-urlencoded")
# XXX the details of Host could be better tested
self.assertEqual(req.unredirected_hdrs["Host"], "example.com")
self.assertEqual(req.unredirected_hdrs["Spam"], "eggs")
# don't clobber existing headers
req.add_unredirected_header("Content-length", "foo")
req.add_unredirected_header("Content-type", "bar")
req.add_unredirected_header("Host", "baz")
req.add_unredirected_header("Spam", "foo")
newreq = h.do_request_(req)
self.assertEqual(req.unredirected_hdrs["Content-length"], "foo")
self.assertEqual(req.unredirected_hdrs["Content-type"], "bar")
self.assertEqual(req.unredirected_hdrs["Host"], "baz")
self.assertEqual(req.unredirected_hdrs["Spam"], "foo")
def test_http_doubleslash(self):
        # Check that the presence of an unnecessary double slash in a url
        # does not break anything. Previously, a double slash directly after
        # the host could cause incorrect parsing.
h = urllib.request.AbstractHTTPHandler()
o = h.parent = MockOpener()
data = ""
ds_urls = [
"http://example.com/foo/bar/baz.html",
"http://example.com//foo/bar/baz.html",
"http://example.com/foo//bar/baz.html",
"http://example.com/foo/bar//baz.html"
]
for ds_url in ds_urls:
ds_req = Request(ds_url, data)
# Check whether host is determined correctly if there is no proxy
np_ds_req = h.do_request_(ds_req)
self.assertEqual(np_ds_req.unredirected_hdrs["Host"],"example.com")
# Check whether host is determined correctly if there is a proxy
ds_req.set_proxy("someproxy:3128",None)
p_ds_req = h.do_request_(ds_req)
self.assertEqual(p_ds_req.unredirected_hdrs["Host"],"example.com")
def test_fixpath_in_weirdurls(self):
        # Issue4493: urllib2 should supply '/' for urls whose path does not
        # start with '/'
h = urllib.request.AbstractHTTPHandler()
o = h.parent = MockOpener()
weird_url = 'http://www.python.org?getspam'
req = Request(weird_url)
newreq = h.do_request_(req)
self.assertEqual(newreq.host,'www.python.org')
self.assertEqual(newreq.selector,'/?getspam')
url_without_path = 'http://www.python.org'
req = Request(url_without_path)
newreq = h.do_request_(req)
self.assertEqual(newreq.host,'www.python.org')
self.assertEqual(newreq.selector,'')
def test_errors(self):
h = urllib.request.HTTPErrorProcessor()
o = h.parent = MockOpener()
url = "http://example.com/"
req = Request(url)
# all 2xx are passed through
r = MockResponse(200, "OK", {}, "", url)
newr = h.http_response(req, r)
self.assertIs(r, newr)
self.assertFalse(hasattr(o, "proto")) # o.error not called
r = MockResponse(202, "Accepted", {}, "", url)
newr = h.http_response(req, r)
self.assertIs(r, newr)
self.assertFalse(hasattr(o, "proto")) # o.error not called
r = MockResponse(206, "Partial content", {}, "", url)
newr = h.http_response(req, r)
self.assertIs(r, newr)
self.assertFalse(hasattr(o, "proto")) # o.error not called
# anything else calls o.error (and MockOpener returns None, here)
r = MockResponse(502, "Bad gateway", {}, "", url)
self.assertIsNone(h.http_response(req, r))
self.assertEqual(o.proto, "http") # o.error called
self.assertEqual(o.args, (req, r, 502, "Bad gateway", {}))
def test_cookies(self):
cj = MockCookieJar()
h = urllib.request.HTTPCookieProcessor(cj)
o = h.parent = MockOpener()
req = Request("http://example.com/")
r = MockResponse(200, "OK", {}, "")
newreq = h.http_request(req)
self.assertIs(cj.ach_req, req)
self.assertIs(cj.ach_req, newreq)
self.assertEqual(req.get_origin_req_host(), "example.com")
self.assertFalse(req.is_unverifiable())
newr = h.http_response(req, r)
self.assertIs(cj.ec_req, req)
self.assertIs(cj.ec_r, r)
self.assertIs(r, newr)
def test_redirect(self):
from_url = "http://example.com/a.html"
to_url = "http://example.com/b.html"
h = urllib.request.HTTPRedirectHandler()
o = h.parent = MockOpener()
# ordinary redirect behaviour
for code in 301, 302, 303, 307:
for data in None, "blah\nblah\n":
method = getattr(h, "http_error_%s" % code)
req = Request(from_url, data)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
req.add_header("Nonsense", "viking=withhold")
if data is not None:
req.add_header("Content-Length", str(len(data)))
req.add_unredirected_header("Spam", "spam")
try:
method(req, MockFile(), code, "Blah",
MockHeaders({"location": to_url}))
except urllib.error.HTTPError:
# 307 in response to POST requires user OK
self.assertTrue(code == 307 and data is not None)
self.assertEqual(o.req.get_full_url(), to_url)
try:
self.assertEqual(o.req.get_method(), "GET")
except AttributeError:
self.assertFalse(o.req.has_data())
# now it's a GET, there should not be headers regarding content
# (possibly dragged from before being a POST)
headers = [x.lower() for x in o.req.headers]
self.assertTrue("content-length" not in headers)
self.assertTrue("content-type" not in headers)
self.assertEqual(o.req.headers["Nonsense"],
"viking=withhold")
self.assertTrue("Spam" not in o.req.headers)
self.assertTrue("Spam" not in o.req.unredirected_hdrs)
# loop detection
req = Request(from_url)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
def redirect(h, req, url=to_url):
h.http_error_302(req, MockFile(), 302, "Blah",
MockHeaders({"location": url}))
# Note that the *original* request shares the same record of
# redirections with the sub-requests caused by the redirections.
# detect infinite loop redirect of a URL to itself
req = Request(from_url, origin_req_host="example.com")
count = 0
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
try:
while 1:
redirect(h, req, "http://example.com/")
count = count + 1
except urllib.error.HTTPError:
# don't stop until max_repeats, because cookies may introduce state
self.assertEqual(count, urllib.request.HTTPRedirectHandler.max_repeats)
# detect endless non-repeating chain of redirects
req = Request(from_url, origin_req_host="example.com")
count = 0
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
try:
while 1:
redirect(h, req, "http://example.com/%d" % count)
count = count + 1
except urllib.error.HTTPError:
self.assertEqual(count,
urllib.request.HTTPRedirectHandler.max_redirections)
def test_cookie_redirect(self):
# cookies shouldn't leak into redirected requests
from http.cookiejar import CookieJar
from test.test_http_cookiejar import interact_netscape
cj = CookieJar()
interact_netscape(cj, "http://www.example.com/", "spam=eggs")
hh = MockHTTPHandler(302, "Location: http://www.cracker.com/\r\n\r\n")
hdeh = urllib.request.HTTPDefaultErrorHandler()
hrh = urllib.request.HTTPRedirectHandler()
cp = urllib.request.HTTPCookieProcessor(cj)
o = build_test_opener(hh, hdeh, hrh, cp)
o.open("http://www.example.com/")
self.assertFalse(hh.req.has_header("Cookie"))
def test_proxy(self):
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(http="proxy.example.com:3128"))
o.add_handler(ph)
meth_spec = [
[("http_open", "return response")]
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://acme.example.com/")
self.assertEqual(req.get_host(), "acme.example.com")
r = o.open(req)
self.assertEqual(req.get_host(), "proxy.example.com:3128")
self.assertEqual([(handlers[0], "http_open")],
[tup[0:2] for tup in o.calls])
def test_proxy_no_proxy(self):
os.environ['no_proxy'] = 'python.org'
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(http="proxy.example.com"))
o.add_handler(ph)
req = Request("http://www.perl.org/")
self.assertEqual(req.get_host(), "www.perl.org")
r = o.open(req)
self.assertEqual(req.get_host(), "proxy.example.com")
req = Request("http://www.python.org")
self.assertEqual(req.get_host(), "www.python.org")
r = o.open(req)
self.assertEqual(req.get_host(), "www.python.org")
del os.environ['no_proxy']
def test_proxy_https(self):
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(https="proxy.example.com:3128"))
o.add_handler(ph)
meth_spec = [
[("https_open", "return response")]
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("https://www.example.com/")
self.assertEqual(req.get_host(), "www.example.com")
r = o.open(req)
self.assertEqual(req.get_host(), "proxy.example.com:3128")
self.assertEqual([(handlers[0], "https_open")],
[tup[0:2] for tup in o.calls])
def test_proxy_https_proxy_authorization(self):
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(https='proxy.example.com:3128'))
o.add_handler(ph)
https_handler = MockHTTPSHandler()
o.add_handler(https_handler)
req = Request("https://www.example.com/")
req.add_header("Proxy-Authorization","FooBar")
req.add_header("User-Agent","Grail")
self.assertEqual(req.get_host(), "www.example.com")
self.assertIsNone(req._tunnel_host)
r = o.open(req)
# Verify Proxy-Authorization gets tunneled to request.
# httpsconn req_headers do not have the Proxy-Authorization header but
# the req will have.
self.assertFalse(("Proxy-Authorization","FooBar") in
https_handler.httpconn.req_headers)
self.assertTrue(("User-Agent","Grail") in
https_handler.httpconn.req_headers)
self.assertIsNotNone(req._tunnel_host)
self.assertEqual(req.get_host(), "proxy.example.com:3128")
self.assertEqual(req.get_header("Proxy-authorization"),"FooBar")
def test_basic_auth(self, quote_char='"'):
opener = OpenerDirector()
password_manager = MockPasswordManager()
auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
realm = "ACME Widget Store"
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Basic realm=%s%s%s\r\n\r\n' %
(quote_char, realm, quote_char) )
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Authorization",
realm, http_handler, password_manager,
"http://acme.example.com/protected",
"http://acme.example.com/protected",
)
def test_basic_auth_with_single_quoted_realm(self):
self.test_basic_auth(quote_char="'")
def test_proxy_basic_auth(self):
opener = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(http="proxy.example.com:3128"))
opener.add_handler(ph)
password_manager = MockPasswordManager()
auth_handler = urllib.request.ProxyBasicAuthHandler(password_manager)
realm = "ACME Networks"
http_handler = MockHTTPHandler(
407, 'Proxy-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Proxy-authorization",
realm, http_handler, password_manager,
"http://acme.example.com:3128/protected",
"proxy.example.com:3128",
)
def test_basic_and_digest_auth_handlers(self):
# HTTPDigestAuthHandler threw an exception if it couldn't handle a 40*
# response (http://python.org/sf/1479302), where it should instead
# return None to allow another handler (especially
# HTTPBasicAuthHandler) to handle the response.
# Also (http://python.org/sf/14797027, RFC 2617 section 1.2), we must
# try digest first (since it's the strongest auth scheme), so we record
# order of calls here to check digest comes first:
class RecordingOpenerDirector(OpenerDirector):
def __init__(self):
OpenerDirector.__init__(self)
self.recorded = []
def record(self, info):
self.recorded.append(info)
class TestDigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
def http_error_401(self, *args, **kwds):
self.parent.record("digest")
urllib.request.HTTPDigestAuthHandler.http_error_401(self,
*args, **kwds)
class TestBasicAuthHandler(urllib.request.HTTPBasicAuthHandler):
def http_error_401(self, *args, **kwds):
self.parent.record("basic")
urllib.request.HTTPBasicAuthHandler.http_error_401(self,
*args, **kwds)
opener = RecordingOpenerDirector()
password_manager = MockPasswordManager()
digest_handler = TestDigestAuthHandler(password_manager)
basic_handler = TestBasicAuthHandler(password_manager)
realm = "ACME Networks"
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(basic_handler)
opener.add_handler(digest_handler)
opener.add_handler(http_handler)
# check basic auth isn't blocked by digest handler failing
self._test_basic_auth(opener, basic_handler, "Authorization",
realm, http_handler, password_manager,
"http://acme.example.com/protected",
"http://acme.example.com/protected",
)
# check digest was tried before basic (twice, because
# _test_basic_auth called .open() twice)
self.assertEqual(opener.recorded, ["digest", "basic"]*2)
def _test_basic_auth(self, opener, auth_handler, auth_header,
realm, http_handler, password_manager,
request_url, protected_url):
import base64
user, password = "wile", "coyote"
# .add_password() fed through to password manager
auth_handler.add_password(realm, request_url, user, password)
self.assertEqual(realm, password_manager.realm)
self.assertEqual(request_url, password_manager.url)
self.assertEqual(user, password_manager.user)
self.assertEqual(password, password_manager.password)
r = opener.open(request_url)
# should have asked the password manager for the username/password
self.assertEqual(password_manager.target_realm, realm)
self.assertEqual(password_manager.target_url, protected_url)
# expect one request without authorization, then one with
self.assertEqual(len(http_handler.requests), 2)
self.assertFalse(http_handler.requests[0].has_header(auth_header))
userpass = bytes('%s:%s' % (user, password), "ascii")
auth_hdr_value = ('Basic ' +
base64.encodebytes(userpass).strip().decode())
self.assertEqual(http_handler.requests[1].get_header(auth_header),
auth_hdr_value)
self.assertEqual(http_handler.requests[1].unredirected_hdrs[auth_header],
auth_hdr_value)
# if the password manager can't find a password, the handler won't
# handle the HTTP auth error
password_manager.user = password_manager.password = None
http_handler.reset()
r = opener.open(request_url)
self.assertEqual(len(http_handler.requests), 1)
self.assertFalse(http_handler.requests[0].has_header(auth_header))
class MiscTests(unittest.TestCase):
def test_build_opener(self):
class MyHTTPHandler(urllib.request.HTTPHandler): pass
class FooHandler(urllib.request.BaseHandler):
def foo_open(self): pass
class BarHandler(urllib.request.BaseHandler):
def bar_open(self): pass
build_opener = urllib.request.build_opener
o = build_opener(FooHandler, BarHandler)
self.opener_has_handler(o, FooHandler)
self.opener_has_handler(o, BarHandler)
# can take a mix of classes and instances
o = build_opener(FooHandler, BarHandler())
self.opener_has_handler(o, FooHandler)
self.opener_has_handler(o, BarHandler)
# subclasses of default handlers override default handlers
o = build_opener(MyHTTPHandler)
self.opener_has_handler(o, MyHTTPHandler)
# a particular case of overriding: default handlers can be passed
# in explicitly
o = build_opener()
self.opener_has_handler(o, urllib.request.HTTPHandler)
o = build_opener(urllib.request.HTTPHandler)
self.opener_has_handler(o, urllib.request.HTTPHandler)
o = build_opener(urllib.request.HTTPHandler())
self.opener_has_handler(o, urllib.request.HTTPHandler)
# Issue2670: multiple handlers sharing the same base class
class MyOtherHTTPHandler(urllib.request.HTTPHandler): pass
o = build_opener(MyHTTPHandler, MyOtherHTTPHandler)
self.opener_has_handler(o, MyHTTPHandler)
self.opener_has_handler(o, MyOtherHTTPHandler)
def opener_has_handler(self, opener, handler_class):
self.assertTrue(any(h.__class__ == handler_class
for h in opener.handlers))
class RequestTests(unittest.TestCase):
def setUp(self):
self.get = Request("http://www.python.org/~jeremy/")
self.post = Request("http://www.python.org/~jeremy/",
"data",
headers={"X-Test": "test"})
def test_method(self):
self.assertEqual("POST", self.post.get_method())
self.assertEqual("GET", self.get.get_method())
def test_add_data(self):
self.assertFalse(self.get.has_data())
self.assertEqual("GET", self.get.get_method())
self.get.add_data("spam")
self.assertTrue(self.get.has_data())
self.assertEqual("POST", self.get.get_method())
def test_get_full_url(self):
self.assertEqual("http://www.python.org/~jeremy/",
self.get.get_full_url())
def test_selector(self):
self.assertEqual("/~jeremy/", self.get.get_selector())
req = Request("http://www.python.org/")
self.assertEqual("/", req.get_selector())
def test_get_type(self):
self.assertEqual("http", self.get.get_type())
def test_get_host(self):
self.assertEqual("www.python.org", self.get.get_host())
def test_get_host_unquote(self):
req = Request("http://www.%70ython.org/")
self.assertEqual("www.python.org", req.get_host())
def test_proxy(self):
self.assertFalse(self.get.has_proxy())
self.get.set_proxy("www.perl.org", "http")
self.assertTrue(self.get.has_proxy())
self.assertEqual("www.python.org", self.get.get_origin_req_host())
self.assertEqual("www.perl.org", self.get.get_host())
def test_wrapped_url(self):
req = Request("<URL:http://www.python.org>")
self.assertEqual("www.python.org", req.get_host())
def test_urlwith_fragment(self):
req = Request("http://www.python.org/?qs=query#fragment=true")
self.assertEqual("/?qs=query", req.get_selector())
req = Request("http://www.python.org/#fun=true")
self.assertEqual("/", req.get_selector())
def test_main(verbose=None):
from test import test_urllib2
support.run_doctest(test_urllib2, verbose)
support.run_doctest(urllib.request, verbose)
tests = (TrivialTests,
OpenerDirectorTests,
HandlerTests,
MiscTests,
RequestTests)
support.run_unittest(*tests)
if __name__ == "__main__":
test_main(verbose=True)
| mancoast/CPythonPyc_test | fail/313_test_urllib2.py | Python | gpl-3.0 | 51,087 |
#!/usr/bin/env python
# File written by pyctools-editor. Do not edit.
import argparse
import logging
from pyctools.core.compound import Compound
import pyctools.components.arithmetic
import pyctools.components.qt.qtdisplay
import pyctools.components.zone.zoneplategenerator
class Network(object):
components = \
{ 'clipper': { 'class': 'pyctools.components.arithmetic.Arithmetic',
'config': "{'func': '16+((data > 180)*219)'}",
'pos': (200.0, 200.0)},
'clipper2': { 'class': 'pyctools.components.arithmetic.Arithmetic',
'config': "{'func': '16+((data > 230)*219)'}",
'pos': (200.0, 330.0)},
'qd': { 'class': 'pyctools.components.qt.qtdisplay.QtDisplay',
'config': "{'framerate': 60}",
'pos': (460.0, 200.0)},
'stacker': { 'class': 'pyctools.components.arithmetic.Arithmetic2',
'config': "{'func': 'numpy.vstack((data1,data2))'}",
'pos': (330.0, 200.0)},
'zpg': { 'class': 'pyctools.components.zone.zoneplategenerator.ZonePlateGenerator',
'config': "{'kx': 0.04, 'kt': -0.34, 'xlen': 600, 'ylen': "
"400, 'zlen': 1000, 'looping': 'repeat'}",
'pos': (70.0, 200.0)},
'zpg2': { 'class': 'pyctools.components.zone.zoneplategenerator.ZonePlateGenerator',
'config': "{'kx': 0.002, 'kt': -0.017, 'xlen': 600, 'ylen': "
"200, 'zlen': 1000, 'looping': 'repeat'}",
'pos': (70.0, 330.0)}}
linkages = \
{ ('clipper', 'output'): [('stacker', 'input1')],
('clipper2', 'output'): [('stacker', 'input2')],
('stacker', 'output'): [('qd', 'input')],
('zpg', 'output'): [('clipper', 'input')],
('zpg2', 'output'): [('clipper2', 'input')]}
def make(self):
comps = {}
for name, component in self.components.items():
comps[name] = eval(component['class'])(config=eval(component['config']))
return Compound(linkages=self.linkages, **comps)
if __name__ == '__main__':
from PyQt5 import QtCore, QtWidgets
QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_X11InitThreads)
app = QtWidgets.QApplication([])
comp = Network().make()
cnf = comp.get_config()
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
cnf.parser_add(parser)
parser.add_argument('-v', '--verbose', action='count', default=0,
help='increase verbosity of log messages')
args = parser.parse_args()
logging.basicConfig(level=logging.ERROR - (args.verbose * 10))
del args.verbose
cnf.parser_set(args)
comp.set_config(cnf)
comp.start()
app.exec_()
comp.stop()
comp.join()
| jim-easterbrook/pyctools-demo | src/scripts/temporal_alias/stage_2.py | Python | gpl-3.0 | 2,808 |
# -*- coding: utf-8 -*-
import itertools
"""
Languages | ShortCode | Wordnet
Albanian | sq | als
Arabic | ar | arb
Bulgarian | bg | bul
Catalan | ca | cat
Chinese | zh | cmn
Chinese (Taiwan)  |   qn         |   qcn
Danish            |   da         |   dan
Greek             |   el         |   ell
Basque            |   eu         |   eus
Persian           |   fa         |   fas
Finnish           |   fi         |   fin
French | fr | fra
Galician | gl | glg
Hebrew | he | heb
Croatian | hr | hrv
Indonesian | id | ind
Italian | it | ita
Japanese | ja | jpn
Norwegian NyNorsk | nn | nno
Norwegian Bokmål | nb/no | nob
Polish | pl | pol
Portuguese | pt | por
Slovenian | sl | slv
Spanish | es | spa
Swedish | sv | swe
Thai | tt | tha
Malay             |   ms         |   zsm
English           |   en         |   eng
"""
"""
Language short codes => Wordnet Code
"""
AVAILABLE_LANGUAGES = dict([('sq','als'), ('ar', 'arb'), ('bg', 'bul'), ('ca', 'cat'), ('da', 'dan'), ('zh', 'cmn'),
('el','ell'), ('eu', 'eus'), ('fa', 'fas'), ('fi', 'fin'), ('fr', 'fra'),
('gl','glg'), ('he', 'heb'), ('hr', 'hrv'), ('id', 'ind'), ('it', 'ita'),
('ja','jpn'),
('nn', 'nno'), ('nb', 'nob'),
('no', 'nob'), ('pl', 'pol'),
('pt', 'por'),
('qn','qcn'), ('sl', 'slv'), ('es', 'spa'), ('sv', 'swe'), ('tt', 'tha'),
('ms', 'zsm'),
('en', 'eng')])
"""
Language names => Short Code
"""
AVAILABLE_LANGUAGES_NAMES = dict([
    ('albanian', 'sq'), ('arabic', 'ar'), ('bulgarian', 'bg'), ('catalan', 'ca'), ('danish', 'da'),
    ('chinese', 'zh'), ('basque', 'eu'), ('persian', 'fa'), ('finnish', 'fi'), ('french', 'fr'),
    ('galician', 'gl'), ('hebrew', 'he'), ('croatian', 'hr'), ('indonesian', 'id'), ('italian', 'it'),
    ('japanese', 'ja'), ('norwegian_nynorsk', 'nn'), ('norwegian', 'no'), ('norwegian_bokmal', 'nb'),
    ('polish', 'pl'), ('portuguese', 'pt'), ('slovenian', 'sl'), ('spanish', 'es'),
    ('swedish', 'sv'), ('thai', 'tt'), ('malay', 'ms'), ('english', 'en')
])
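# Example lookups (illustrative): AVAILABLE_LANGUAGES_NAMES['italian'] -> 'it',
# and AVAILABLE_LANGUAGES['it'] -> 'ita', the code expected by the ``lang``
# argument of the NLTK wordnet corpus reader used below.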
class WordnetManager(object):
def __init__(self, language="en"):
"""
Constructor for the wordnet manager.
It takes a main language.
"""
self.__language = language
def __isLanguageAvailable(self, code=None, language_name=None):
"""
Check if a language is available
"""
if code is None and language_name is None:
raise Exception("Error evaluating the correct language")
if code is not None and code.lower() in AVAILABLE_LANGUAGES:
return True
if language_name is not None and language_name.lower() in AVAILABLE_LANGUAGES_NAMES:
return True
return False
def __nameToWordnetCode(self, name):
"""
It returns the wordnet code for a given language name
"""
if not self.__isLanguageAvailable(language_name=name):
raise Exception("Wordnet code not found for the language name %s " % name)
name = name.lower()
languageShortCode = AVAILABLE_LANGUAGES_NAMES[name]
wordnetCode = self.__shortCodeToWordnetCode(code=languageShortCode)
return wordnetCode
def __shortCodeToWordnetCode(self, shortCode):
"""
It returns the wordnet code from a given language short code
"""
if not self.__isLanguageAvailable(code=shortCode):
raise Exception("Wordnet code not found for the language short code %s " % shortCode)
code = shortCode.lower()
wordnetCode = AVAILABLE_LANGUAGES[code]
return wordnetCode
def __getSynsets(self, word, wordNetCode):
"""
It returns the synsets given both word and language code
"""
from nltk.corpus import wordnet as wn
synsets = wn.synsets(word, lang=wordNetCode)
return synsets
def getLemmas(self, word, languageCode="en"):
"""
Get the lemmas for a given word
:word: The word
:languageCode: The language for a given lemma
"""
wnCode = self.__shortCodeToWordnetCode(shortCode=languageCode)
synsets = self.__getSynsets(word, wnCode) #wn.synsets(word, lang=wnCode)
lemmas = dict([('en', [])])
for synset in synsets:
enLemmas = synset.lemma_names()
lemmas['en'].extend(enLemmas)
if languageCode != "en" and self.__isLanguageAvailable(code=languageCode):
langLemmas = list(sorted(set(synset.lemma_names(lang=wnCode))))
lemmas[languageCode] = langLemmas
lemmas['en'] = list(sorted(set(lemmas.get('en', []))))
return lemmas
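    # Result shape of getLemmas (illustrative sketch; assumes the NLTK
    # 'wordnet' and 'omw' corpora are installed):
    #   getLemmas('dog', languageCode='it')
    #   -> {'en': ['Canis_familiaris', 'dog', ...], 'it': ['cane', ...]}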
def getSynonyms(self, words=[], language_code="en"):
"""
Get the synonyms from a list of words.
:words: A list of words
:language_code: the language for the synonyms.
"""
        if words is None or not isinstance(words, list) or len(words) <= 0:
return []
if not self.__isLanguageAvailable(code=language_code):
return []
wnCode = self.__shortCodeToWordnetCode(language_code)
result = {}
for word in words:
result[word] = dict([('lemmas', self.getLemmas(word,languageCode=language_code))])
return result
def getHyponyms(self, words, language_code="en"):
"""
        Get more specific synsets (hyponyms) for the given words
"""
wnCode = self.__shortCodeToWordnetCode(language_code)
result = {}
for word in words:
synonyms = self.__getSynsets(word, wnCode)
hyponyms = [hyp for synset in synonyms for hyp in synset.hyponyms()]
engLemmas = [hyp.lemma_names() for hyp in hyponyms]
lemmas = dict([('en', list(sorted(set(itertools.chain.from_iterable(engLemmas)), key=lambda s: s.lower())))])
if language_code != "en":
languageLemmas = [hyp.lemma_names(lang=wnCode) for hyp in hyponyms]
languageLemmas = list(sorted(set(itertools.chain.from_iterable(languageLemmas)), key=lambda s: s.lower()))
lemmas[language_code] = languageLemmas
result[word] = dict([ ('lemmas', lemmas), ('language', language_code)])
return result
def getHypernyms(self, words, language_code="en"):
"""
        Get more general synsets (hypernyms) for the given words
"""
wnCode = self.__shortCodeToWordnetCode(language_code)
result = {}
for word in words:
synonyms = self.__getSynsets(word, wnCode)
hypernyms = [hyp for synset in synonyms for hyp in synset.hypernyms()]
engLemmas = [hyp.lemma_names() for hyp in hypernyms]
lemmas = dict([('en', list(sorted(set(itertools.chain.from_iterable(engLemmas)), key=lambda s: s.lower())))])
if language_code != "en":
languageLemmas = [hyp.lemma_names(lang=wnCode) for hyp in hypernyms]
languageLemmas = list(sorted(set(itertools.chain.from_iterable(languageLemmas)), key=lambda s: s.lower()))
lemmas[language_code] = languageLemmas
result[word] = dict([ ('lemmas', lemmas), ('language', language_code)])
return result
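if __name__ == '__main__':
    # Minimal usage sketch (illustrative, not part of the original module);
    # it assumes the NLTK 'wordnet' and 'omw' corpora have been downloaded,
    # e.g. with nltk.download('wordnet') and nltk.download('omw').
    manager = WordnetManager(language="en")
    print(manager.getLemmas("house", languageCode="es"))
    print(manager.getHypernyms(["house"], language_code="es"))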
| domenicosolazzo/jroc | jroc/nlp/wordnet/WordnetManager.py | Python | gpl-3.0 | 8,043 |
#############################################################################
# $HeadURL$
#############################################################################
""" ..mod: FTSRequest
=================
Helper class to perform FTS job submission and monitoring.
"""
# # imports
import sys
import re
import time
# # from DIRAC
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.Utilities.File import checkGuid
from DIRAC.Core.Utilities.Adler import compareAdler, intAdlerToHex, hexAdlerToInt
from DIRAC.Core.Utilities.SiteSEMapping import getSitesForSE
from DIRAC.Core.Utilities.Time import dateTime
from DIRAC.Resources.Storage.StorageElement import StorageElement
from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
from DIRAC.Core.Utilities.ReturnValues import returnSingleResult
from DIRAC.AccountingSystem.Client.Types.DataOperation import DataOperation
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import Resources
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.DataManagementSystem.Client.FTSJob import FTSJob
from DIRAC.DataManagementSystem.Client.FTSFile import FTSFile
# # RCSID
__RCSID__ = "$Id$"
class FTSRequest( object ):
"""
.. class:: FTSRequest
Helper class for FTS job submission and monitoring.
"""
# # default checksum type
__defaultCksmType = "ADLER32"
  # # flag to disable/enable checksum test, default: disabled
__cksmTest = False
def __init__( self ):
"""c'tor
:param self: self reference
"""
self.log = gLogger.getSubLogger( self.__class__.__name__, True )
# # final states tuple
self.finalStates = ( 'Canceled', 'Failed', 'Hold',
'Finished', 'FinishedDirty' )
# # failed states tuple
self.failedStates = ( 'Canceled', 'Failed',
'Hold', 'FinishedDirty' )
# # successful states tuple
self.successfulStates = ( 'Finished', 'Done' )
# # all file states tuple
self.fileStates = ( 'Done', 'Active', 'Pending', 'Ready', 'Canceled', 'Failed',
'Finishing', 'Finished', 'Submitted', 'Hold', 'Waiting' )
self.statusSummary = {}
# # request status
self.requestStatus = 'Unknown'
# # dict for FTS job files
self.fileDict = {}
# # dict for replicas information
self.catalogReplicas = {}
# # dict for metadata information
self.catalogMetadata = {}
# # dict for files that failed to register
self.failedRegistrations = {}
    # # placeholder for FileCatalog reference
self.oCatalog = None
# # submit timestamp
self.submitTime = ''
# # placeholder FTS job GUID
self.ftsGUID = ''
# # placeholder for FTS server URL
self.ftsServer = ''
    # # flag marking FTS job completeness
    self.isTerminal = False
    # # completeness percentage
self.percentageComplete = 0.0
# # source SE name
self.sourceSE = ''
# # flag marking source SE validity
self.sourceValid = False
# # source space token
self.sourceToken = ''
# # target SE name
self.targetSE = ''
# # flag marking target SE validity
self.targetValid = False
# # target space token
self.targetToken = ''
# # placeholder for target StorageElement
self.oTargetSE = None
# # placeholder for source StorageElement
self.oSourceSE = None
# # checksum type, set it to default
self.__cksmType = self.__defaultCksmType
# # disable checksum test by default
self.__cksmTest = False
# # statuses that prevent submitting to FTS
self.noSubmitStatus = ( 'Failed', 'Done', 'Staging' )
# # were sources resolved?
self.sourceResolved = False
# # Number of file transfers actually submitted
self.submittedFiles = 0
self.transferTime = 0
self.submitCommand = Operations().getValue( 'DataManagement/FTSPlacement/FTS2/SubmitCommand', 'glite-transfer-submit' )
self.monitorCommand = Operations().getValue( 'DataManagement/FTSPlacement/FTS2/MonitorCommand', 'glite-transfer-status' )
self.ftsJob = None
self.ftsFiles = []
####################################################################
#
# Methods for setting/getting/checking the SEs
#
def setSourceSE( self, se ):
""" set SE for source
:param self: self reference
:param str se: source SE name
"""
if se == self.targetSE:
return S_ERROR( "SourceSE is TargetSE" )
self.sourceSE = se
self.oSourceSE = StorageElement( self.sourceSE )
return self.__checkSourceSE()
def __checkSourceSE( self ):
""" check source SE availability
:param self: self reference
"""
if not self.sourceSE:
return S_ERROR( "SourceSE not set" )
res = self.oSourceSE.isValid( 'Read' )
if not res['OK']:
return S_ERROR( "SourceSE not available for reading" )
res = self.__getSESpaceToken( self.oSourceSE )
if not res['OK']:
self.log.error( "FTSRequest failed to get SRM Space Token for SourceSE", res['Message'] )
return S_ERROR( "SourceSE does not support FTS transfers" )
if self.__cksmTest:
res = self.oSourceSE.getChecksumType()
if not res["OK"]:
self.log.error( "Unable to get checksum type for SourceSE",
"%s: %s" % ( self.sourceSE, res["Message"] ) )
cksmType = res["Value"]
if cksmType in ( "NONE", "NULL" ):
self.log.warn( "Checksum type set to %s at SourceSE %s, disabling checksum test" % ( cksmType,
self.sourceSE ) )
self.__cksmTest = False
elif cksmType != self.__cksmType:
self.log.warn( "Checksum type mismatch, disabling checksum test" )
self.__cksmTest = False
self.sourceToken = res['Value']
self.sourceValid = True
return S_OK()
def setTargetSE( self, se ):
""" set target SE
:param self: self reference
:param str se: target SE name
"""
if se == self.sourceSE:
return S_ERROR( "TargetSE is SourceSE" )
self.targetSE = se
self.oTargetSE = StorageElement( self.targetSE )
return self.__checkTargetSE()
def setTargetToken( self, token ):
""" target space token setter
:param self: self reference
:param str token: target space token
"""
self.targetToken = token
return S_OK()
def __checkTargetSE( self ):
""" check target SE availability
:param self: self reference
"""
if not self.targetSE:
return S_ERROR( "TargetSE not set" )
res = self.oTargetSE.isValid( 'Write' )
if not res['OK']:
return S_ERROR( "TargetSE not available for writing" )
res = self.__getSESpaceToken( self.oTargetSE )
if not res['OK']:
self.log.error( "FTSRequest failed to get SRM Space Token for TargetSE", res['Message'] )
return S_ERROR( "TargetSE does not support FTS transfers" )
# # check checksum types
if self.__cksmTest:
res = self.oTargetSE.getChecksumType()
if not res["OK"]:
self.log.error( "Unable to get checksum type for TargetSE",
"%s: %s" % ( self.targetSE, res["Message"] ) )
cksmType = res["Value"]
if cksmType in ( "NONE", "NULL" ):
self.log.warn( "Checksum type set to %s at TargetSE %s, disabling checksum test" % ( cksmType,
self.targetSE ) )
self.__cksmTest = False
elif cksmType != self.__cksmType:
self.log.warn( "Checksum type mismatch, disabling checksum test" )
self.__cksmTest = False
self.targetToken = res['Value']
self.targetValid = True
return S_OK()
@staticmethod
def __getSESpaceToken( oSE ):
""" get space token from StorageElement instance
:param self: self reference
:param StorageElement oSE: StorageElement instance
"""
res = oSE.getStorageParameters( "SRM2" )
if not res['OK']:
return res
return S_OK( res['Value'].get( 'SpaceToken' ) )
####################################################################
#
# Methods for setting/getting FTS request parameters
#
def setFTSGUID( self, guid ):
""" FTS job GUID setter
:param self: self reference
    :param str guid: string containing GUID
"""
if not checkGuid( guid ):
return S_ERROR( "Incorrect GUID format" )
self.ftsGUID = guid
return S_OK()
def setFTSServer( self, server ):
""" FTS server setter
:param self: self reference
:param str server: FTS server URL
"""
self.ftsServer = server
return S_OK()
def isRequestTerminal( self ):
""" check if FTS job has terminated
:param self: self reference
"""
if self.requestStatus in self.finalStates:
self.isTerminal = True
return S_OK( self.isTerminal )
def setCksmTest( self, cksmTest = False ):
""" set cksm test
:param self: self reference
:param bool cksmTest: flag to enable/disable checksum test
"""
self.__cksmTest = bool( cksmTest )
return S_OK( self.__cksmTest )
####################################################################
#
# Methods for setting/getting/checking files and their metadata
#
def setLFN( self, lfn ):
""" add LFN :lfn: to :fileDict:
:param self: self reference
:param str lfn: LFN to add to
"""
self.fileDict.setdefault( lfn, {'Status':'Waiting'} )
return S_OK()
def setSourceSURL( self, lfn, surl ):
""" source SURL setter
:param self: self reference
:param str lfn: LFN
:param str surl: source SURL
"""
target = self.fileDict[lfn].get( 'Target' )
if target == surl:
return S_ERROR( "Source and target the same" )
return self.__setFileParameter( lfn, 'Source', surl )
def getSourceSURL( self, lfn ):
""" get source SURL for LFN :lfn:
:param self: self reference
:param str lfn: LFN
"""
return self.__getFileParameter( lfn, 'Source' )
def setTargetSURL( self, lfn, surl ):
""" set target SURL for LFN :lfn:
:param self: self reference
:param str lfn: LFN
:param str surl: target SURL
"""
source = self.fileDict[lfn].get( 'Source' )
if source == surl:
return S_ERROR( "Source and target the same" )
return self.__setFileParameter( lfn, 'Target', surl )
def getFailReason( self, lfn ):
""" get fail reason for file :lfn:
:param self: self reference
:param str lfn: LFN
"""
return self.__getFileParameter( lfn, 'Reason' )
def getRetries( self, lfn ):
""" get number of attepmts made to transfer file :lfn:
:param self: self reference
:param str lfn: LFN
"""
return self.__getFileParameter( lfn, 'Retries' )
def getTransferTime( self, lfn ):
""" get duration of transfer for file :lfn:
:param self: self reference
:param str lfn: LFN
"""
return self.__getFileParameter( lfn, 'Duration' )
def getFailed( self ):
""" get list of wrongly transferred LFNs
:param self: self reference
"""
return S_OK( [ lfn for lfn in self.fileDict
if self.fileDict[lfn].get( 'Status', '' ) in self.failedStates ] )
def getStaging( self ):
""" get files set for prestaging """
return S_OK( [lfn for lfn in self.fileDict
if self.fileDict[lfn].get( 'Status', '' ) == 'Staging'] )
def getDone( self ):
""" get list of succesfully transferred LFNs
:param self: self reference
"""
return S_OK( [ lfn for lfn in self.fileDict
if self.fileDict[lfn].get( 'Status', '' ) in self.successfulStates ] )
def __setFileParameter( self, lfn, paramName, paramValue ):
""" set :paramName: to :paramValue: for :lfn: file
:param self: self reference
:param str lfn: LFN
:param str paramName: parameter name
:param mixed paramValue: a new parameter value
"""
self.setLFN( lfn )
self.fileDict[lfn][paramName] = paramValue
return S_OK()
def __getFileParameter( self, lfn, paramName ):
""" get value of :paramName: for file :lfn:
:param self: self reference
:param str lfn: LFN
:param str paramName: parameter name
"""
if lfn not in self.fileDict:
return S_ERROR( "Supplied file not set" )
if paramName not in self.fileDict[lfn]:
return S_ERROR( "%s not set for file" % paramName )
return S_OK( self.fileDict[lfn][paramName] )
####################################################################
#
# Methods for submission
#
def submit( self, monitor = False, printOutput = True ):
""" submit FTS job
:param self: self reference
:param bool monitor: flag to monitor progress of FTS job
:param bool printOutput: flag to print output of execution to stdout
"""
res = self.__prepareForSubmission()
if not res['OK']:
return res
res = self.__submitFTSTransfer()
if not res['OK']:
return res
resDict = { 'ftsGUID' : self.ftsGUID, 'ftsServer' : self.ftsServer, 'submittedFiles' : self.submittedFiles }
if monitor or printOutput:
gLogger.always( "Submitted %s@%s" % ( self.ftsGUID, self.ftsServer ) )
if monitor:
self.monitor( untilTerminal = True, printOutput = printOutput, full = False )
return S_OK( resDict )
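  # Typical driver sketch (illustrative; assumes a configured DIRAC
  # installation with a valid proxy, and SE names that exist in the CS):
  #   request = FTSRequest()
  #   request.setSourceSE( 'SOURCE-SE' )
  #   request.setTargetSE( 'TARGET-SE' )
  #   request.setLFN( '/some/lfn' )
  #   request.submit( monitor = True, printOutput = True )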
def __prepareForSubmission( self ):
""" check validity of job before submission
:param self: self reference
"""
if not self.fileDict:
return S_ERROR( "No files set" )
if not self.sourceValid:
return S_ERROR( "SourceSE not valid" )
if not self.targetValid:
return S_ERROR( "TargetSE not valid" )
if not self.ftsServer:
res = self.__resolveFTSServer()
if not res['OK']:
return S_ERROR( "FTSServer not valid" )
self.resolveSource()
self.resolveTarget()
res = self.__filesToSubmit()
if not res['OK']:
return S_ERROR( "No files to submit" )
return S_OK()
def __getCatalogObject( self ):
""" CatalogInterface instance facade
:param self: self reference
"""
try:
if not self.oCatalog:
self.oCatalog = FileCatalog()
return S_OK()
    except Exception:
return S_ERROR()
def __updateReplicaCache( self, lfns = None, overwrite = False ):
""" update replica cache for list of :lfns:
:param self: self reference
:param mixed lfns: list of LFNs
:param bool overwrite: flag to trigger cache clearing and updating
"""
if not lfns:
lfns = self.fileDict.keys()
toUpdate = [ lfn for lfn in lfns if ( lfn not in self.catalogReplicas ) or overwrite ]
if not toUpdate:
return S_OK()
res = self.__getCatalogObject()
if not res['OK']:
return res
res = self.oCatalog.getReplicas( toUpdate )
if not res['OK']:
return S_ERROR( "Failed to update replica cache: %s" % res['Message'] )
for lfn, error in res['Value']['Failed'].items():
self.__setFileParameter( lfn, 'Reason', error )
self.__setFileParameter( lfn, 'Status', 'Failed' )
for lfn, replicas in res['Value']['Successful'].items():
self.catalogReplicas[lfn] = replicas
return S_OK()
def __updateMetadataCache( self, lfns = None ):
""" update metadata cache for list of LFNs
:param self: self reference
    :param list lfns: list of LFNs
"""
if not lfns:
lfns = self.fileDict.keys()
toUpdate = [ lfn for lfn in lfns if lfn not in self.catalogMetadata ]
if not toUpdate:
return S_OK()
res = self.__getCatalogObject()
if not res['OK']:
return res
res = self.oCatalog.getFileMetadata( toUpdate )
if not res['OK']:
return S_ERROR( "Failed to get source catalog metadata: %s" % res['Message'] )
for lfn, error in res['Value']['Failed'].items():
self.__setFileParameter( lfn, 'Reason', error )
self.__setFileParameter( lfn, 'Status', 'Failed' )
for lfn, metadata in res['Value']['Successful'].items():
self.catalogMetadata[lfn] = metadata
return S_OK()
def resolveSource( self ):
""" resolve source SE eligible for submission
:param self: self reference
"""
# Avoid resolving sources twice
if self.sourceResolved:
return S_OK()
# Only resolve files that need a transfer
toResolve = [ lfn for lfn in self.fileDict if self.fileDict[lfn].get( "Status", "" ) != "Failed" ]
if not toResolve:
return S_OK()
res = self.__updateMetadataCache( toResolve )
if not res['OK']:
return res
res = self.__updateReplicaCache( toResolve )
if not res['OK']:
return res
# Define the source URLs
for lfn in toResolve:
replicas = self.catalogReplicas.get( lfn, {} )
if self.sourceSE not in replicas:
gLogger.warn( "resolveSource: skipping %s - not replicas at SourceSE %s" % ( lfn, self.sourceSE ) )
self.__setFileParameter( lfn, 'Reason', "No replica at SourceSE" )
self.__setFileParameter( lfn, 'Status', 'Failed' )
continue
res = returnSingleResult( self.oSourceSE.getURL( lfn, protocol = 'srm' ) )
if not res['OK']:
gLogger.warn( "resolveSource: skipping %s - %s" % ( lfn, res["Message"] ) )
self.__setFileParameter( lfn, 'Reason', res['Message'] )
self.__setFileParameter( lfn, 'Status', 'Failed' )
continue
res = self.setSourceSURL( lfn, res['Value'] )
if not res['OK']:
gLogger.warn( "resolveSource: skipping %s - %s" % ( lfn, res["Message"] ) )
self.__setFileParameter( lfn, 'Reason', res['Message'] )
self.__setFileParameter( lfn, 'Status', 'Failed' )
continue
toResolve = []
for lfn in self.fileDict:
if "Source" in self.fileDict[lfn]:
toResolve.append( lfn )
if not toResolve:
return S_ERROR( "No eligible Source files" )
    # Get metadata of the sources, to check for existence, availability and caching
res = self.oSourceSE.getFileMetadata( toResolve )
if not res['OK']:
return S_ERROR( "Failed to check source file metadata" )
for lfn, error in res['Value']['Failed'].items():
if re.search( 'File does not exist', error ):
gLogger.warn( "resolveSource: skipping %s - source file does not exists" % lfn )
self.__setFileParameter( lfn, 'Reason', "Source file does not exist" )
self.__setFileParameter( lfn, 'Status', 'Failed' )
else:
gLogger.warn( "resolveSource: skipping %s - failed to get source metadata" % lfn )
self.__setFileParameter( lfn, 'Reason', "Failed to get Source metadata" )
self.__setFileParameter( lfn, 'Status', 'Failed' )
toStage = []
nbStagedFiles = 0
for lfn, metadata in res['Value']['Successful'].items():
lfnStatus = self.fileDict.get( lfn, {} ).get( 'Status' )
if metadata['Unavailable']:
gLogger.warn( "resolveSource: skipping %s - source file unavailable" % lfn )
self.__setFileParameter( lfn, 'Reason', "Source file Unavailable" )
self.__setFileParameter( lfn, 'Status', 'Failed' )
elif metadata['Lost']:
gLogger.warn( "resolveSource: skipping %s - source file lost" % lfn )
self.__setFileParameter( lfn, 'Reason', "Source file Lost" )
self.__setFileParameter( lfn, 'Status', 'Failed' )
elif not metadata['Cached']:
if lfnStatus != 'Staging':
toStage.append( lfn )
elif metadata['Size'] != self.catalogMetadata[lfn]['Size']:
gLogger.warn( "resolveSource: skipping %s - source file size mismatch" % lfn )
self.__setFileParameter( lfn, 'Reason', "Source size mismatch" )
self.__setFileParameter( lfn, 'Status', 'Failed' )
elif self.catalogMetadata[lfn]['Checksum'] and metadata['Checksum'] and \
not compareAdler( metadata['Checksum'], self.catalogMetadata[lfn]['Checksum'] ):
gLogger.warn( "resolveSource: skipping %s - source file checksum mismatch" % lfn )
self.__setFileParameter( lfn, 'Reason', "Source checksum mismatch" )
self.__setFileParameter( lfn, 'Status', 'Failed' )
elif lfnStatus == 'Staging':
# file that was staging is now cached
self.__setFileParameter( lfn, 'Status', 'Waiting' )
nbStagedFiles += 1
# Some files were being staged
if nbStagedFiles:
self.log.info( 'resolveSource: %d files have been staged' % nbStagedFiles )
# Launching staging of files not in cache
if toStage:
gLogger.warn( "resolveSource: %s source files not cached, prestaging..." % len( toStage ) )
stage = self.oSourceSE.prestageFile( toStage )
if not stage["OK"]:
gLogger.error( "resolveSource: error is prestaging", stage["Message"] )
for lfn in toStage:
self.__setFileParameter( lfn, 'Reason', stage["Message"] )
self.__setFileParameter( lfn, 'Status', 'Failed' )
else:
for lfn in toStage:
if lfn in stage['Value']['Successful']:
self.__setFileParameter( lfn, 'Status', 'Staging' )
elif lfn in stage['Value']['Failed']:
self.__setFileParameter( lfn, 'Reason', stage['Value']['Failed'][lfn] )
self.__setFileParameter( lfn, 'Status', 'Failed' )
self.sourceResolved = True
return S_OK()
def resolveTarget( self ):
""" find target SE eligible for submission
:param self: self reference
"""
toResolve = [ lfn for lfn in self.fileDict
if self.fileDict[lfn].get( 'Status' ) not in self.noSubmitStatus ]
if not toResolve:
return S_OK()
res = self.__updateReplicaCache( toResolve )
if not res['OK']:
return res
for lfn in toResolve:
res = returnSingleResult( self.oTargetSE.getURL( lfn, protocol = 'srm' ) )
if not res['OK']:
        reason = res['Message']
gLogger.warn( "resolveTarget: skipping %s - %s" % ( lfn, reason ) )
self.__setFileParameter( lfn, 'Reason', reason )
self.__setFileParameter( lfn, 'Status', 'Failed' )
continue
res = self.setTargetSURL( lfn, res['Value'] )
if not res['OK']:
gLogger.warn( "resolveTarget: skipping %s - %s" % ( lfn, res["Message"] ) )
self.__setFileParameter( lfn, 'Reason', res['Message'] )
self.__setFileParameter( lfn, 'Status', 'Failed' )
continue
toResolve = []
for lfn in self.fileDict:
if "Target" in self.fileDict[lfn]:
toResolve.append( lfn )
if not toResolve:
return S_ERROR( "No eligible Target files" )
res = self.oTargetSE.exists( toResolve )
if not res['OK']:
return S_ERROR( "Failed to check target existence" )
for lfn, error in res['Value']['Failed'].items():
self.__setFileParameter( lfn, 'Reason', error )
self.__setFileParameter( lfn, 'Status', 'Failed' )
toRemove = []
for lfn, exists in res['Value']['Successful'].items():
if exists:
res = self.getSourceSURL( lfn )
if not res['OK']:
gLogger.warn( "resolveTarget: skipping %s - target exists" % lfn )
self.__setFileParameter( lfn, 'Reason', "Target exists" )
self.__setFileParameter( lfn, 'Status', 'Failed' )
elif res['Value'] == self.fileDict[lfn]['Target']:
gLogger.warn( "resolveTarget: skipping %s - source and target pfns are the same" % lfn )
self.__setFileParameter( lfn, 'Reason', "Source and Target the same" )
self.__setFileParameter( lfn, 'Status', 'Failed' )
else:
toRemove.append( lfn )
if toRemove:
self.oTargetSE.removeFile( toRemove )
return S_OK()
def __filesToSubmit( self ):
"""
check if there is at least one file to submit
:return: S_OK if at least one file is present, S_ERROR otherwise
"""
for lfn in self.fileDict:
lfnStatus = self.fileDict[lfn].get( 'Status' )
source = self.fileDict[lfn].get( 'Source' )
target = self.fileDict[lfn].get( 'Target' )
if lfnStatus not in self.noSubmitStatus and source and target:
return S_OK()
return S_ERROR()
def __createFTSFiles( self ):
""" create LFNs file for glite-transfer-submit command
        This file consists of one line for each file to be transferred:
sourceSURL targetSURL [CHECKSUMTYPE:CHECKSUM]
:param self: self reference
"""
self.__updateMetadataCache()
for lfn in self.fileDict:
lfnStatus = self.fileDict[lfn].get( 'Status' )
if lfnStatus not in self.noSubmitStatus:
cksmStr = ""
# # add chsmType:cksm only if cksmType is specified, else let FTS decide by itself
if self.__cksmTest and self.__cksmType:
checkSum = self.catalogMetadata.get( lfn, {} ).get( 'Checksum' )
if checkSum:
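            # # normalise the checksum representation: the hexAdlerToInt /
            # # intAdlerToHex round-trip yields a canonical hex string for FTS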
cksmStr = " %s:%s" % ( self.__cksmType, intAdlerToHex( hexAdlerToInt( checkSum ) ) )
ftsFile = FTSFile()
ftsFile.LFN = lfn
ftsFile.SourceSURL = self.fileDict[lfn].get( 'Source' )
ftsFile.TargetSURL = self.fileDict[lfn].get( 'Target' )
ftsFile.SourceSE = self.sourceSE
ftsFile.TargetSE = self.targetSE
ftsFile.Status = self.fileDict[lfn].get( 'Status' )
ftsFile.Checksum = cksmStr
ftsFile.Size = self.catalogMetadata.get( lfn, {} ).get( 'Size' )
self.ftsFiles.append( ftsFile )
self.submittedFiles += 1
return S_OK()
def __createFTSJob( self, guid = None ):
self.__createFTSFiles()
ftsJob = FTSJob()
ftsJob.RequestID = 0
ftsJob.OperationID = 0
ftsJob.SourceSE = self.sourceSE
ftsJob.TargetSE = self.targetSE
ftsJob.SourceToken = self.sourceToken
ftsJob.TargetToken = self.targetToken
ftsJob.FTSServer = self.ftsServer
if guid:
ftsJob.FTSGUID = guid
for ftsFile in self.ftsFiles:
ftsFile.Attempt += 1
ftsFile.Error = ""
ftsJob.addFile( ftsFile )
self.ftsJob = ftsJob
def __submitFTSTransfer( self ):
""" create and execute glite-transfer-submit CLI command
:param self: self reference
"""
log = gLogger.getSubLogger( 'Submit' )
self.__createFTSJob()
submit = self.ftsJob.submitFTS2( command = self.submitCommand )
if not submit["OK"]:
log.error( "unable to submit FTSJob: %s" % submit["Message"] )
return submit
log.info( "FTSJob '%s'@'%s' has been submitted" % ( self.ftsJob.FTSGUID, self.ftsJob.FTSServer ) )
# # update statuses for job files
for ftsFile in self.ftsJob:
ftsFile.FTSGUID = self.ftsJob.FTSGUID
ftsFile.Status = "Submitted"
ftsFile.Attempt += 1
log.info( "FTSJob '%s'@'%s' has been submitted" % ( self.ftsJob.FTSGUID, self.ftsJob.FTSServer ) )
self.ftsGUID = self.ftsJob.FTSGUID
return S_OK()
def __resolveFTSServer( self ):
"""
    resolve FTS server to use; it should be the closest one to the target SE
:param self: self reference
"""
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getFTSServersForSites
if not self.targetSE:
return S_ERROR( "Target SE not set" )
res = getSitesForSE( self.targetSE )
if not res['OK'] or not res['Value']:
return S_ERROR( "Could not determine target site" )
targetSites = res['Value']
targetSite = ''
for targetSite in targetSites:
targetFTS = getFTSServersForSites( [targetSite] )
if targetFTS['OK']:
ftsTarget = targetFTS['Value'][targetSite]
if ftsTarget:
self.ftsServer = ftsTarget
return S_OK( self.ftsServer )
else:
return targetFTS
return S_ERROR( 'No FTS server found for %s' % targetSite )
####################################################################
#
# Methods for monitoring
#
def summary( self, untilTerminal = False, printOutput = False ):
""" summary of FTS job
:param self: self reference
:param bool untilTerminal: flag to monitor FTS job to its final state
:param bool printOutput: flag to print out monitoring information to the stdout
"""
res = self.__isSummaryValid()
if not res['OK']:
return res
while not self.isTerminal:
res = self.__parseOutput( full = True )
if not res['OK']:
return res
if untilTerminal:
self.__print()
self.isRequestTerminal()
if res['Value'] or ( not untilTerminal ):
break
time.sleep( 1 )
if untilTerminal:
print ""
if printOutput and ( not untilTerminal ):
return self.dumpSummary( printOutput = printOutput )
return S_OK()
def monitor( self, untilTerminal = False, printOutput = False, full = True ):
""" monitor FTS job
:param self: self reference
:param bool untilTerminal: flag to monitor FTS job to its final state
:param bool printOutput: flag to print out monitoring information to the stdout
"""
if not self.ftsJob:
self.resolveSource()
self.__createFTSJob( self.ftsGUID )
res = self.__isSummaryValid()
if not res['OK']:
return res
if untilTerminal:
res = self.summary( untilTerminal = untilTerminal, printOutput = printOutput )
if not res['OK']:
return res
res = self.__parseOutput( full = full )
if not res['OK']:
return res
if untilTerminal:
self.finalize()
if printOutput:
self.dump()
return res
def dumpSummary( self, printOutput = False ):
""" get FTS job summary as str
:param self: self reference
:param bool printOutput: print summary to stdout
"""
outStr = ''
for status in sorted( self.statusSummary ):
if self.statusSummary[status]:
outStr = '%s\t%-10s : %-10s\n' % ( outStr, status, str( self.statusSummary[status] ) )
outStr = outStr.rstrip( '\n' )
if printOutput:
print outStr
return S_OK( outStr )
def __print( self ):
""" print progress bar of FTS job completeness to stdout
:param self: self reference
"""
width = 100
bits = int( ( width * self.percentageComplete ) / 100 )
outStr = "|%s>%s| %.1f%s %s %s" % ( "="*bits, " "*( width - bits ),
self.percentageComplete, "%",
self.requestStatus, " "*10 )
sys.stdout.write( "%s\r" % ( outStr ) )
sys.stdout.flush()
def dump( self ):
""" print FTS job parameters and files to stdout
:param self: self reference
"""
print "%-10s : %-10s" % ( "Status", self.requestStatus )
print "%-10s : %-10s" % ( "Source", self.sourceSE )
print "%-10s : %-10s" % ( "Target", self.targetSE )
print "%-10s : %-128s" % ( "Server", self.ftsServer )
print "%-10s : %-128s" % ( "GUID", self.ftsGUID )
for lfn in sorted( self.fileDict ):
print "\n %-15s : %-128s" % ( 'LFN', lfn )
for key in ['Source', 'Target', 'Status', 'Reason', 'Duration']:
print " %-15s : %-128s" % ( key, str( self.fileDict[lfn].get( key ) ) )
return S_OK()
def __isSummaryValid( self ):
""" check validity of FTS job summary report
:param self: self reference
"""
if not self.ftsServer:
return S_ERROR( "FTSServer not set" )
if not self.ftsGUID:
return S_ERROR( "FTSGUID not set" )
return S_OK()
def __parseOutput( self, full = False ):
""" execute glite-transfer-status command and parse its output
:param self: self reference
:param bool full: glite-transfer-status verbosity level, when set, collect information of files as well
"""
monitor = self.ftsJob.monitorFTS2( command = self.monitorCommand, full = full )
if not monitor['OK']:
return monitor
self.percentageComplete = self.ftsJob.Completeness
self.requestStatus = self.ftsJob.Status
self.submitTime = self.ftsJob.SubmitTime
statusSummary = monitor['Value']
if statusSummary:
for state in statusSummary:
self.statusSummary[state] = statusSummary[state]
self.transferTime = 0
for ftsFile in self.ftsJob:
lfn = ftsFile.LFN
self.__setFileParameter( lfn, 'Status', ftsFile.Status )
self.__setFileParameter( lfn, 'Reason', ftsFile.Error )
self.__setFileParameter( lfn, 'Duration', ftsFile._duration )
targetURL = self.__getFileParameter( lfn, 'Target' )
if not targetURL['OK']:
self.__setFileParameter( lfn, 'Target', ftsFile.TargetSURL )
self.transferTime += int( ftsFile._duration )
return S_OK()
####################################################################
#
# Methods for finalization
#
def finalize( self ):
""" finalize FTS job
:param self: self reference
"""
self.__updateMetadataCache()
transEndTime = dateTime()
regStartTime = time.time()
res = self.getTransferStatistics()
transDict = res['Value']
res = self.__registerSuccessful( transDict['transLFNs'] )
regSuc, regTotal = res['Value']
regTime = time.time() - regStartTime
if self.sourceSE and self.targetSE:
self.__sendAccounting( regSuc, regTotal, regTime, transEndTime, transDict )
return S_OK()
def getTransferStatistics( self ):
""" collect information of Transfers that can be used by Accounting
:param self: self reference
"""
transDict = { 'transTotal': len( self.fileDict ),
'transLFNs': [],
'transOK': 0,
'transSize': 0 }
for lfn in self.fileDict:
if self.fileDict[lfn].get( 'Status' ) in self.successfulStates:
if self.fileDict[lfn].get( 'Duration', 0 ):
transDict['transLFNs'].append( lfn )
transDict['transOK'] += 1
if lfn in self.catalogMetadata:
transDict['transSize'] += self.catalogMetadata[lfn].get( 'Size', 0 )
return S_OK( transDict )
def getFailedRegistrations( self ):
""" get failed registrations dict
:param self: self reference
"""
return S_OK( self.failedRegistrations )
def __registerSuccessful( self, transLFNs ):
""" register successfully transferred files to the catalogs,
fill failedRegistrations dict for files that failed to register
:param self: self reference
:param list transLFNs: LFNs in FTS job
"""
self.failedRegistrations = {}
toRegister = {}
for lfn in transLFNs:
res = returnSingleResult( self.oTargetSE.getURL( self.fileDict[lfn].get( 'Target' ), protocol = 'srm' ) )
if not res['OK']:
self.__setFileParameter( lfn, 'Reason', res['Message'] )
self.__setFileParameter( lfn, 'Status', 'Failed' )
else:
toRegister[lfn] = { 'PFN' : res['Value'], 'SE' : self.targetSE }
if not toRegister:
return S_OK( ( 0, 0 ) )
res = self.__getCatalogObject()
if not res['OK']:
      self.failedRegistrations = toRegister
self.log.error( 'Failed to get Catalog Object', res['Message'] )
return S_OK( ( 0, len( toRegister ) ) )
res = self.oCatalog.addReplica( toRegister )
if not res['OK']:
self.failedRegistrations = toRegister
      self.log.error( 'Failed to add replicas', res['Message'] )
return S_OK( ( 0, len( toRegister ) ) )
for lfn, error in res['Value']['Failed'].items():
self.failedRegistrations[lfn] = toRegister[lfn]
self.log.error( 'Registration of Replica failed', '%s : %s' % ( lfn, str( error ) ) )
return S_OK( ( len( res['Value']['Successful'] ), len( toRegister ) ) )
def __sendAccounting( self, regSuc, regTotal, regTime, transEndTime, transDict ):
""" send accounting record
:param self: self reference
:param regSuc: number of files successfully registered
    :param regTotal: number of files attempted to register
    :param regTime: time spent registering the files
    :param transEndTime: time stamp at the end of the FTS job
    :param dict transDict: dict holding counters for files being transferred, their sizes and successful transfers
"""
oAccounting = DataOperation()
oAccounting.setEndTime( transEndTime )
oAccounting.setStartTime( self.submitTime )
accountingDict = {}
accountingDict['OperationType'] = 'replicateAndRegister'
result = getProxyInfo()
if not result['OK']:
userName = 'system'
else:
userName = result['Value'].get( 'username', 'unknown' )
accountingDict['User'] = userName
accountingDict['Protocol'] = 'FTS' if 'fts3' not in self.ftsServer else 'FTS3'
accountingDict['RegistrationTime'] = regTime
accountingDict['RegistrationOK'] = regSuc
accountingDict['RegistrationTotal'] = regTotal
accountingDict['TransferOK'] = transDict['transOK']
accountingDict['TransferTotal'] = transDict['transTotal']
accountingDict['TransferSize'] = transDict['transSize']
accountingDict['FinalStatus'] = self.requestStatus
accountingDict['Source'] = self.sourceSE
accountingDict['Destination'] = self.targetSE
accountingDict['TransferTime'] = self.transferTime
oAccounting.setValuesFromDict( accountingDict )
self.log.verbose( "Attempting to commit accounting message..." )
oAccounting.commit()
self.log.verbose( "...committed." )
return S_OK()
| miloszz/DIRAC | DataManagementSystem/Client/FTSRequest.py | Python | gpl-3.0 | 37,261 |
import sys, os, urllib, time, socket, mt, ssl
from dlmanager.NZB import NZBParser
from dlmanager.NZB.nntplib2 import NNTP_SSL,NNTPError,NNTP, NNTPReplyError
from dlmanager.NZB.Decoder import ArticleDecoder
class StatusReport(object):
def __init__(self):
self.message = "Downloading.."
self.total_bytes = 0
self.current_bytes = 0
self.completed = False
self.error_occured = False
self.start_time = 0
self.file_name = ""
self.kbps = 0
self.assembly = False
self.assembly_percent = 0
class NZBClient():
def __init__(self, nzbFile, save_to, nntpServer, nntpPort, nntpUser=None, nntpPassword=None, nntpSSL=False, nntpConnections=5, cache_path=""):
# Settings
self.save_to = save_to
self.nntpServer = nntpServer
self.nntpUser = nntpUser
self.nntpPort = nntpPort
self.nntpPassword = nntpPassword
self.nntpSSL = nntpSSL
self.nntpConnections = nntpConnections
self.threads = []
self.running = False
# setup our cache folder.
self.cache_path = cache_path
if ( self.cache_path == "" ): self.cache_path = "packages/dlmanager/cache/"
self.clearCache()
        # ensure both directories exist
mt.utils.mkdir(self.save_to)
mt.utils.mkdir(self.cache_path)
# Open the NZB, get this show started.
realFile = urllib.urlopen(nzbFile)
self.nzb = NZBParser.parse(realFile)
self.all_decoded = False
self.connection_count = 0
# used to track status.
self.status = StatusReport()
self.status.file_name = nzbFile
self.status.total_bytes = self.nzb.size
# Segment tracking.
self.cache = []
self.segment_list = []
self.segments_finished = []
self.segments_aborted = []
# Queues.
self.segment_queue = []
self.failed_queue = []
# Used to track the speed.
self.speedTime = 0
self.speedCounter = 0
def start(self):
# keep track of running time.
self.status.start_time = time.time()
self.running = True
# Generate a list of segments and build our queue.
for file in self.nzb.files:
for seg in file.segments:
self.segment_list.append(seg.msgid)
self.segment_queue.append(seg)
# start the connections.
for a in range(0, self.nntpConnections):
thread = NNTPConnection(a,
self.nntpServer,
self.nntpPort,
self.nntpUser,
self.nntpPassword,
self.nntpSSL,
self.nextSeg,
self.segComplete,
self.segFailed,
self.threadStopped)
self.threads.append(thread)
self.connection_count += 1
thread.start()
# start the article decoder.
self.articleDecoder = ArticleDecoder(self.decodeNextSeg,
self.save_to,
self.cache_path,
self.decodeFinished,
self.decodeSuccess,
self.decodeFailed,
self.assemblyStatus)
self.articleDecoder.start()
def getStatus(self):
return self.status
# Article Decoder - Next segment.
def decodeNextSeg(self):
        # if we're not running, send an instant kill switch.
if ( not self.running ): return -1
# try to grab a segment from the cache to decode.
seg = None
try:
seg = self.cache.pop()
except:
pass
if ( seg == None ) and ( self.all_decoded ):
return -1
return seg
# Article Decoder - Decoded all segments.
def decodeFinished(self):
self.status.completed = True
# Article Decoder - Decode success.
def decodeSuccess(self, seg):
self.status.current_bytes += seg.size
self.segments_finished.append(seg.msgid)
if ( (len(self.segments_finished)+len(self.segments_aborted)) >= len(self.segment_list) ):
self.all_decoded = True
# Article Decoder - Decode failed.
def decodeFailed(self, seg):
if ( seg == None ): return
mt.log.debug("Segment failed to decode: " + seg.msgid)
self.segFailed(seg)
# Article Decoder - Assembly Status.
def assemblyStatus(self, percent):
self.status.assembly = True
self.status.assembly_percent = percent
# NNTP Connection - Thread stopped.
def threadStopped(self, thread_num):
self.connection_count -= 1
# NNTP Connection - Segment completed.
def segComplete(self, seg):
if ( seg == None ): return
if ( seg.data ):
data_size = len("".join(seg.data))
current_time = time.time()
if ( (current_time - self.speedTime) > 1 ):
self.status.kbps = self.speedCounter
self.speedCounter = 0
self.speedTime = current_time
else:
self.speedCounter += (data_size/1024)
self.cache.append(seg)
#mt.log.debug("Segment Complete: " + seg.msgid)
# NNTP Connection - Download of segment failed.
def segFailed(self, seg):
if ( seg == None ): return
if ( seg.aborted() ):
mt.log.error("Segment Aborted: " + seg.msgid + " after " + str(seg.retries) + " attempts.")
self.segments_aborted.append(seg.msgid)
seg.data = []
if ( (len(self.segments_finished)+len(self.segments_aborted)) >= len(self.segment_list) ):
self.all_decoded = True
return
seg.retries += 1
mt.log.error("Segment Failed: " + seg.msgid + " Attempt #" + str(seg.retries) + ".")
self.failed_queue.append(seg)
# NNTP Connection - Next Segment
def nextSeg(self):
        # if we're not running, send an instant kill switch.
if ( not self.running ): return -1
# try to get a segment from main queue or failed queue.
queue_empty = False
seg = None
try:
seg = self.segment_queue.pop()
except:
try:
seg = self.failed_queue.pop()
except:
queue_empty = True
pass
pass
        # We're all out of segments; if decoding is done, kill the threads.
if ( queue_empty ) and ( self.all_decoded ):
return -1
return seg
# empty the cache of any files.
def clearCache(self):
mt.utils.rmdir(self.cache_path)
def stop(self):
self.running = False
self.articleDecoder.stop()
for thread in self.threads:
thread.stop()
self.clearCache()
class NNTPConnection(mt.threads.Thread):
def __init__(self, connection_number, server, port, username, password, ssl, nextSegFunc, onSegComplete = None, onSegFailed = None, onThreadStop = None):
mt.threads.Thread.__init__(self)
# Settings
self.connection = None
self.connection_number = connection_number
self.server = server
self.port = port
self.username = username
self.password = password
self.ssl = ssl
# Events.
self.nextSegFunc = nextSegFunc
self.onSegComplete = onSegComplete
self.onSegFailed = onSegFailed
self.onThreadStop = onThreadStop
def connect(self):
# Open either an SSL or regular NNTP connection.
try:
if ( self.ssl ):
self.connection = NNTP_SSL(self.server, self.port, self.username, self.password, False, True, timeout=15)
else:
self.connection = NNTP(self.server, self.port, self.username, self.password, False, True, timeout=15)
except:
pass
if ( self.connection ): return True
return False
def disconnect(self):
if ( self.connection ):
try:
self.connection.quit()
except:
pass
self.connection = None
def run(self):
connection = None
seg = None
# Thread has started.
mt.log.debug("Thread " + str(self.connection_number) + " started.")
start_time = time.time()
while(self.running):
seg = None
connected = self.connect()
if ( connected ):
while(self.running):
seg = self.nextSegFunc()
                    # Out of segments; sleep for a bit and see if we get any more.
if ( seg == None ):
self.sleep(0.1)
continue
# Download complete, bail.
if ( seg == -1 ):
self.running = False
seg = None
break
# Attempt to grab a segment.
try:
resp, nr, id, data = self.connection.body("<%s>" % seg.msgid)
if resp[0] == "2":
seg.data = data
if ( self.onSegComplete ): self.onSegComplete(seg)
seg = None
except ssl.SSLError:
break
except NNTPError as e:
mt.log.error("Error getting segment: " + e.response)
pass
except:
mt.log.error("Error getting segment.")
pass
if ( seg and self.onSegFailed ):
self.onSegFailed(seg)
seg = None
# Disconnect when we're finished.
if ( seg and self.onSegFailed ): self.onSegFailed(seg)
self.disconnect()
else:
mt.log.error("Connection error. Reconnecting in 3 seconds.")
self.sleep(3)
# Thread has ended.
self.disconnect() # just to be safe.
end_time = time.time()
mt.log.debug("Thread " + str(self.connection_number) + " stopped after " + str(end_time-start_time) + " seconds.")
if ( self.onThreadStop ): self.onThreadStop(self.connection_number)
| andr3wmac/metaTower | packages/dlmanager/NZB/NZBClient.py | Python | gpl-3.0 | 10,499 |
"""Tests for `fix.with_fixture`."""
from __future__ import with_statement
import os
import shutil
import tempfile
from types import FunctionType
from fix import with_fixture
def test_exists():
"""`fix.with_fixture` function exists"""
assert isinstance(with_fixture, FunctionType)
def test_setup_only():
"""`setup_only` fixture works as expected"""
def setup_only(context):
"""A fixture with no `teardown()`."""
def setup():
"""Add something to the context."""
assert context == {}
context.squee = "kapow"
return setup
@with_fixture(setup_only)
def case(context):
"""Check that the context has been set up."""
assert context == {"squee": "kapow"}
case() # pylint: disable=E1120
def test_setup_teardown():
"""`setup_teardown` fixture works as expected"""
def setup_teardown(context):
"""A fixture with both `setup()` and `teardown()`."""
def setup():
"""Add something to the context."""
assert context == {}
context.squee = "kapow"
def teardown():
"""Check that `context.squee` has changed."""
assert context == {"squee": "boing"}
return setup, teardown
@with_fixture(setup_teardown)
def case(context):
"""Alter the context."""
assert context == {"squee": "kapow"}
context.squee = "boing"
case() # pylint: disable=E1120
def test_multiple_invocation():
"""`multiple` fixture creates a fresh context each invocation"""
def multiple(context):
"""A fixture to be invoked multiple times."""
def setup():
"""Add something to the context."""
assert context == {}
context.squee = "kapow"
def teardown():
"""Check that `context.squee` has changed."""
assert context == {"squee": "kapow", "boing": "thunk"}
return setup, teardown
@with_fixture(multiple)
def case(context):
"""Add to the context."""
assert context == {"squee": "kapow"}
context.boing = "thunk"
for _ in range(3):
case() # pylint: disable=E1120
def test_external():
"""`external` fixture interacts as expected with the 'real world'."""
def external(context, files=3):
"""A fixture to manipulate temporary files and directories."""
def setup():
"""Create some temporary files."""
context.temp_dir = tempfile.mkdtemp()
context.filenames = ["file_%03d" % i for i in range(files)]
for filename in context.filenames:
with open(os.path.join(context.temp_dir, filename), "w") as f:
f.write("This is the file %r.\n" % filename)
def teardown():
"""Delete the temporary files created in `setup()`."""
shutil.rmtree(context.temp_dir)
return setup, teardown
@with_fixture(external, files=5)
def check_files(context):
"""Return the number of present and absent files."""
present = 0
absent = 0
for filename in context.filenames:
if os.path.exists(os.path.join(context.temp_dir, filename)):
present += 1
else:
absent += 1
return context.temp_dir, present, absent
temp_dir, present, absent = check_files() # pylint: disable=E1120
assert not os.path.exists(temp_dir)
assert present == 5
assert absent == 0
| schesis/fix | tests/decorators/test_with_fixture.py | Python | gpl-3.0 | 3,536 |
# Copyright (C) 2012,2013
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
r"""
******************************
espressopp.integrator.CapForce
******************************
This class can be used to forcecap all particles or a group of particles.
Force capping means that the force vector of a particle is rescaled
so that the length of the force vector is <= capforce.
Example Usage:
>>> capForce = espressopp.integrator.CapForce(system, 1000.0)
>>> integrator.addExtension(capForce)
CapForce can also be used to forcecap only a group of particles:
>>> particle_group = [45, 67, 89, 103]
>>> capForce = espressopp.integrator.CapForce(system, 1000.0, particle_group)
>>> integrator.addExtension(capForce)
.. function:: espressopp.integrator.CapForce(system, capForce, particleGroup)
:param system:
:param capForce:
:param particleGroup: (default: None)
:type system:
:type capForce:
:type particleGroup:
"""
from espressopp.esutil import cxxinit
from espressopp import pmi
from espressopp.integrator.Extension import *
from _espressopp import integrator_CapForce
class CapForceLocal(ExtensionLocal, integrator_CapForce):
def __init__(self, system, capForce, particleGroup = None):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup():
if (particleGroup == None) or (particleGroup.size() == 0):
cxxinit(self, integrator_CapForce, system, capForce)
else:
cxxinit(self, integrator_CapForce, system, capForce, particleGroup)
if pmi.isController :
class CapForce(Extension, metaclass=pmi.Proxy):
pmiproxydefs = dict(
cls = 'espressopp.integrator.CapForceLocal',
pmicall = ['setCapForce', 'setAbsCapForce', 'getCapForce', 'getAbsCapForce'],
pmiproperty = [ 'particleGroup', 'adress' ]
)
| espressopp/espressopp | src/integrator/CapForce.py | Python | gpl-3.0 | 2,764 |
# -*- coding: utf-8 -*-
"""proyectoP4 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url, patterns
from django.contrib import admin
from Workinout import views
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
    url(r'^Workinout/', include('Workinout.urls')),  # ADD THIS NEW TUPLE!
]
if settings.DEBUG:
urlpatterns += patterns(
'django.views.static',
(r'media/(?P<path>.*)',
'serve',
{'document_root': settings.MEDIA_ROOT}), )
else:
urlpatterns += patterns('', url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_PATH}),
)
| jesmorc/Workinout | proyectoP4/urls.py | Python | gpl-3.0 | 1,273 |
#!/usr/bin/python
# This program is intended to manage patches and apply them automatically
# through email.
#
# Copyright (C) 2008 Imran M Yousuf (imran@smartitengineering.com)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import poplib, email, re, sys, xmlConfigs, utils;
class ReferenceNode :
    def __init__(self, node, emailMessage, references=list(), children=dict(), slotted=False):
self.node = node
self.children = dict(children)
self.references = references[:]
self.slotted = slotted
self.emailMessage = emailMessage
def get_node(self):
return self.node
def get_children(self):
return self.children
def set_node(self, node):
self.node = node
def set_children(self, children):
self.children = children
def get_references(self):
return self.references
def is_slotted(self):
return self.slotted
def set_slotted(self, slotted):
self.slotted = slotted
def get_message(self):
return self.emailMessage
def __repr__(self):
return self.node + "\nREF: " + str(self.references) + "\nChildren: " + str(self.children.keys()) + "\n"
def handleNode(currentNodeInAction, referenceNodeNow, referencesToCheck, patchMessageReferenceNode):
for reference in referencesToCheck[:] :
if reference in referenceNodeNow.get_children() :
referencesToCheck.remove(reference)
return patchMessageReferenceNode[reference]
if len(referencesToCheck) == 0 :
referenceNodeNow.get_children()[currentNodeInAction.get_node()] = currentNodeInAction
def makeChildren(patchMessageReferenceNode) :
ref_keys = patchMessageReferenceNode.keys()
ref_keys.sort()
for messageId in ref_keys:
referenceNode = patchMessageReferenceNode[messageId]
utils.verboseOutput(verbose, "Managing Message Id:", referenceNode.get_node())
referenceIds = referenceNode.get_references()
referenceIdsClone = referenceIds[:]
utils.verboseOutput(verbose, "Cloned References: ", referenceIdsClone)
if len(referenceIds) > 0 :
nextNode = patchMessageReferenceNode[referenceIdsClone[0]]
referenceIdsClone.remove(referenceIdsClone[0])
while nextNode != None :
utils.verboseOutput(verbose, "Next Node: ", nextNode.get_node())
utils.verboseOutput(verbose, "Curent Node: ", referenceNode.get_node())
utils.verboseOutput(verbose, "REF: ", referenceIdsClone)
nextNode = handleNode(referenceNode, nextNode, referenceIdsClone, patchMessageReferenceNode)
if __name__ == "__main__":
arguments = sys.argv
verbose = "false"
pseudoArgs = arguments[:]
while len(pseudoArgs) > 1 :
argument = pseudoArgs[1]
if argument == "-v" or argument == "--verbose" :
verbose = "true"
pseudoArgs.remove(argument)
utils.verboseOutput(verbose, "Checking POP3 for gmail")
try:
emailConfig = xmlConfigs.initializePopConfig("./email-configuration.xml")
myPop = emailConfig.get_pop3_connection()
numMessages = len(myPop.list()[1])
patchMessages = dict()
for i in range(numMessages):
utils.verboseOutput(verbose, "Index: ", i)
totalContent = ""
for content in myPop.retr(i+1)[1]:
totalContent += content + '\n'
msg = email.message_from_string(totalContent)
if 'subject' in msg :
subject = msg['subject']
subjectPattern = "^\[.*PATCH.*\].+"
subjectMatch = re.match(subjectPattern, subject)
utils.verboseOutput(verbose, "Checking subject: ", subject)
if subjectMatch == None :
continue
else :
continue
messageId = ""
if 'message-id' in msg:
messageId = re.search("<(.*)>", msg['message-id']).group(1)
utils.verboseOutput(verbose, 'Message-ID:', messageId)
referenceIds = []
if 'references' in msg:
references = msg['references']
referenceIds = re.findall("<(.*)>", references)
utils.verboseOutput(verbose, "References: ", referenceIds)
currentNode = ReferenceNode(messageId, msg, referenceIds)
patchMessages[messageId] = currentNode
            currentNode.set_slotted(False)
utils.verboseOutput(verbose, "**************Make Children**************")
makeChildren(patchMessages)
utils.verboseOutput(verbose, "--------------RESULT--------------")
utils.verboseOutput(verbose, patchMessages)
except:
utils.verboseOutput(verbose, "Error: ", sys.exc_info())
| imyousuf/smart-patcher | src/smart-patcher.py | Python | gpl-3.0 | 5,526 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-02-20 22:01
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('erudit', '0065_auto_20170202_1152'),
]
operations = [
migrations.AddField(
model_name='issue',
name='force_free_access',
field=models.BooleanField(default=False, verbose_name='Contraindre en libre accès'),
),
]
| erudit/zenon | eruditorg/erudit/migrations/0066_issue_force_free_access.py | Python | gpl-3.0 | 505 |
# Example implementing 5 layer encoder
# Original code taken from
# https://github.com/aymericdamien/TensorFlow-Examples/blob/master/examples/3_NeuralNetworks/autoencoder.py
# The model trained here is restored in load.py
from __future__ import division, print_function, absolute_import
# Import MNIST data
# from tensorflow.examples.tutorials.mnist import input_data
# data_set = input_data.read_data_sets("/tmp/data/", one_hot=True)
# Import libraries
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import sys
import scipy.io as sio
sys.path.insert(0, '../..') # Add path to where TF_Model.py is, if not in the same dir
from TF_Model import *
from utils import *
# 01 thumb
# 10 pinky
action_map = {}
action_map[1] = [0,1]
action_map[2] = [1,0]
# thumb up
mat_contents_t0 = sio.loadmat('/home/linda/school/capstone/data/set2_new_format/EMGjan5/Fred_pinky_Jan5_0.mat')
mat_contents_t1 = sio.loadmat('/home/linda/school/capstone/data/set2_new_format/EMGjan5/Fred_pinky_Jan5_1.mat')
mat_contents_test0 = sio.loadmat('/home/linda/school/capstone/data/set2_new_format/EMGjan5/Fred_pinky_jan5_2.mat')
data_t0 = mat_contents_t0['EMGdata']
data_t1 = mat_contents_t1['EMGdata']
data_test0 = mat_contents_test0['EMGdata']
batch_y_t0, batch_x_t0 = get_batch_from_raw_data_new_format(data_t0, action_map, [0])
batch_y_t1, batch_x_t1 = get_batch_from_raw_data_new_format(data_t1, action_map, [0])
batch_y_test0, batch_x_test0 = get_batch_from_raw_data_new_format(data_test0, action_map, [0])
# thumb up
mat_contents_p0 = sio.loadmat('/home/linda/school/capstone/data/set2_new_format/EMGjan5/Fred_thumb_Jan5_0.mat')
mat_contents_p1 = sio.loadmat('/home/linda/school/capstone/data/set2_new_format/EMGjan5/Fred_thumb_Jan5_1.mat')
mat_contents_test1 = sio.loadmat('/home/linda/school/capstone/data/set2_new_format/EMGjan5/Fred_thumb_Jan5_2.mat')
data_p0 = mat_contents_p0['EMGdata']
data_p1 = mat_contents_p1['EMGdata']
data_test1 = mat_contents_test1['EMGdata']
batch_y_p0, batch_x_p0 = get_batch_from_raw_data_new_format(data_p0, action_map, [0])
batch_y_p1, batch_x_p1 = get_batch_from_raw_data_new_format(data_p1, action_map, [0])
batch_y_test1, batch_x_test1 = get_batch_from_raw_data_new_format(data_test1, action_map, [0])
print("done reading data")
# Create TF_Model, a wrapper for models created using tensorflow
# Note that the configuration file 'config.txt' must be present in the directory
model = TF_Model('model')
# Parameters
learning_rate = 0.05
training_epochs = 200
batch_size = 256
display_step = 1
examples_to_show = 10
# total_batch = int(data_set.train.num_examples/batch_size)
dropout = tf.placeholder(tf.float32)
# Create variables for inputs, outputs and predictions
x = tf.placeholder(tf.float32, [None, 1000])
y = tf.placeholder(tf.float32, [None, 2])
y_true = y
y_pred = model.predict(x)
# Cost function
cost = tf.reduce_mean(tf.pow(y_true - y_pred, 2))
optimizer = tf.train.RMSPropOptimizer(learning_rate).minimize(cost)
# Initializing the variables
init = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init)
model_output = model.predict(x)
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y * tf.log(model_output), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
correct_prediction = tf.equal(tf.argmax(model_output,1), tf.argmax(y,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# Train
for epoch in range(training_epochs):
_, c = sess.run([optimizer, cost], feed_dict={x: batch_x_t0, y: batch_y_t0})
_, c = sess.run([optimizer, cost], feed_dict={x: batch_x_t1, y: batch_y_t1})
_, c = sess.run([optimizer, cost], feed_dict={x: batch_x_p0, y: batch_y_p0})
_, c = sess.run([optimizer, cost], feed_dict={x: batch_x_p1, y: batch_y_p1})
# Display logs per epoch step
print("Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(c))
print(sess.run(accuracy, feed_dict={x: batch_x_test0, y: batch_y_test0}))
print(sess.run(accuracy, feed_dict={x: batch_x_test1, y: batch_y_test1}))
print("===final===")
print(sess.run(accuracy, feed_dict={x: batch_x_test0, y: batch_y_test0}))
print(sess.run(accuracy, feed_dict={x: batch_x_test1, y: batch_y_test1}))
# Save
model.save(sess, 'example_3')
| LindaLS/Sausage_Biscuits | architecture/tests/2_test/train.py | Python | gpl-3.0 | 4,265 |
"""
Tests for closeness centrality.
"""
import pytest
import networkx as nx
from networkx.testing import almost_equal
class TestClosenessCentrality:
@classmethod
def setup_class(cls):
cls.K = nx.krackhardt_kite_graph()
cls.P3 = nx.path_graph(3)
cls.P4 = nx.path_graph(4)
cls.K5 = nx.complete_graph(5)
cls.C4 = nx.cycle_graph(4)
cls.T = nx.balanced_tree(r=2, h=2)
cls.Gb = nx.Graph()
cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
F = nx.florentine_families_graph()
cls.F = F
cls.LM = nx.les_miserables_graph()
# Create random undirected, unweighted graph for testing incremental version
cls.undirected_G = nx.fast_gnp_random_graph(n=100, p=0.6, seed=123)
cls.undirected_G_cc = nx.closeness_centrality(cls.undirected_G)
def test_wf_improved(self):
G = nx.union(self.P4, nx.path_graph([4, 5, 6]))
c = nx.closeness_centrality(G)
cwf = nx.closeness_centrality(G, wf_improved=False)
res = {0: 0.25, 1: 0.375, 2: 0.375, 3: 0.25, 4: 0.222, 5: 0.333, 6: 0.222}
wf_res = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5, 4: 0.667, 5: 1.0, 6: 0.667}
for n in G:
assert almost_equal(c[n], res[n], places=3)
assert almost_equal(cwf[n], wf_res[n], places=3)
def test_digraph(self):
G = nx.path_graph(3, create_using=nx.DiGraph())
c = nx.closeness_centrality(G)
cr = nx.closeness_centrality(G.reverse())
d = {0: 0.0, 1: 0.500, 2: 0.667}
dr = {0: 0.667, 1: 0.500, 2: 0.0}
for n in sorted(self.P3):
assert almost_equal(c[n], d[n], places=3)
assert almost_equal(cr[n], dr[n], places=3)
def test_k5_closeness(self):
c = nx.closeness_centrality(self.K5)
d = {0: 1.000, 1: 1.000, 2: 1.000, 3: 1.000, 4: 1.000}
for n in sorted(self.K5):
assert almost_equal(c[n], d[n], places=3)
def test_p3_closeness(self):
c = nx.closeness_centrality(self.P3)
d = {0: 0.667, 1: 1.000, 2: 0.667}
for n in sorted(self.P3):
assert almost_equal(c[n], d[n], places=3)
def test_krackhardt_closeness(self):
c = nx.closeness_centrality(self.K)
d = {
0: 0.529,
1: 0.529,
2: 0.500,
3: 0.600,
4: 0.500,
5: 0.643,
6: 0.643,
7: 0.600,
8: 0.429,
9: 0.310,
}
for n in sorted(self.K):
assert almost_equal(c[n], d[n], places=3)
def test_florentine_families_closeness(self):
c = nx.closeness_centrality(self.F)
d = {
"Acciaiuoli": 0.368,
"Albizzi": 0.483,
"Barbadori": 0.4375,
"Bischeri": 0.400,
"Castellani": 0.389,
"Ginori": 0.333,
"Guadagni": 0.467,
"Lamberteschi": 0.326,
"Medici": 0.560,
"Pazzi": 0.286,
"Peruzzi": 0.368,
"Ridolfi": 0.500,
"Salviati": 0.389,
"Strozzi": 0.4375,
"Tornabuoni": 0.483,
}
for n in sorted(self.F):
assert almost_equal(c[n], d[n], places=3)
def test_les_miserables_closeness(self):
c = nx.closeness_centrality(self.LM)
d = {
"Napoleon": 0.302,
"Myriel": 0.429,
"MlleBaptistine": 0.413,
"MmeMagloire": 0.413,
"CountessDeLo": 0.302,
"Geborand": 0.302,
"Champtercier": 0.302,
"Cravatte": 0.302,
"Count": 0.302,
"OldMan": 0.302,
"Valjean": 0.644,
"Labarre": 0.394,
"Marguerite": 0.413,
"MmeDeR": 0.394,
"Isabeau": 0.394,
"Gervais": 0.394,
"Listolier": 0.341,
"Tholomyes": 0.392,
"Fameuil": 0.341,
"Blacheville": 0.341,
"Favourite": 0.341,
"Dahlia": 0.341,
"Zephine": 0.341,
"Fantine": 0.461,
"MmeThenardier": 0.461,
"Thenardier": 0.517,
"Cosette": 0.478,
"Javert": 0.517,
"Fauchelevent": 0.402,
"Bamatabois": 0.427,
"Perpetue": 0.318,
"Simplice": 0.418,
"Scaufflaire": 0.394,
"Woman1": 0.396,
"Judge": 0.404,
"Champmathieu": 0.404,
"Brevet": 0.404,
"Chenildieu": 0.404,
"Cochepaille": 0.404,
"Pontmercy": 0.373,
"Boulatruelle": 0.342,
"Eponine": 0.396,
"Anzelma": 0.352,
"Woman2": 0.402,
"MotherInnocent": 0.398,
"Gribier": 0.288,
"MmeBurgon": 0.344,
"Jondrette": 0.257,
"Gavroche": 0.514,
"Gillenormand": 0.442,
"Magnon": 0.335,
"MlleGillenormand": 0.442,
"MmePontmercy": 0.315,
"MlleVaubois": 0.308,
"LtGillenormand": 0.365,
"Marius": 0.531,
"BaronessT": 0.352,
"Mabeuf": 0.396,
"Enjolras": 0.481,
"Combeferre": 0.392,
"Prouvaire": 0.357,
"Feuilly": 0.392,
"Courfeyrac": 0.400,
"Bahorel": 0.394,
"Bossuet": 0.475,
"Joly": 0.394,
"Grantaire": 0.358,
"MotherPlutarch": 0.285,
"Gueulemer": 0.463,
"Babet": 0.463,
"Claquesous": 0.452,
"Montparnasse": 0.458,
"Toussaint": 0.402,
"Child1": 0.342,
"Child2": 0.342,
"Brujon": 0.380,
"MmeHucheloup": 0.353,
}
for n in sorted(self.LM):
assert almost_equal(c[n], d[n], places=3)
def test_weighted_closeness(self):
edges = [
("s", "u", 10),
("s", "x", 5),
("u", "v", 1),
("u", "x", 2),
("v", "y", 1),
("x", "u", 3),
("x", "v", 5),
("x", "y", 2),
("y", "s", 7),
("y", "v", 6),
]
XG = nx.Graph()
XG.add_weighted_edges_from(edges)
c = nx.closeness_centrality(XG, distance="weight")
d = {"y": 0.200, "x": 0.286, "s": 0.138, "u": 0.235, "v": 0.200}
for n in sorted(XG):
assert almost_equal(c[n], d[n], places=3)
#
# Tests for incremental closeness centrality.
#
@staticmethod
def pick_add_edge(g):
u = nx.utils.arbitrary_element(g)
possible_nodes = set(g.nodes())
neighbors = list(g.neighbors(u)) + [u]
possible_nodes.difference_update(neighbors)
v = nx.utils.arbitrary_element(possible_nodes)
return (u, v)
@staticmethod
def pick_remove_edge(g):
u = nx.utils.arbitrary_element(g)
possible_nodes = list(g.neighbors(u))
v = nx.utils.arbitrary_element(possible_nodes)
return (u, v)
def test_directed_raises(self):
with pytest.raises(nx.NetworkXNotImplemented):
dir_G = nx.gn_graph(n=5)
prev_cc = None
edge = self.pick_add_edge(dir_G)
insert = True
nx.incremental_closeness_centrality(dir_G, edge, prev_cc, insert)
def test_wrong_size_prev_cc_raises(self):
with pytest.raises(nx.NetworkXError):
G = self.undirected_G.copy()
edge = self.pick_add_edge(G)
insert = True
prev_cc = self.undirected_G_cc.copy()
prev_cc.pop(0)
nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
def test_wrong_nodes_prev_cc_raises(self):
with pytest.raises(nx.NetworkXError):
G = self.undirected_G.copy()
edge = self.pick_add_edge(G)
insert = True
prev_cc = self.undirected_G_cc.copy()
num_nodes = len(prev_cc)
prev_cc.pop(0)
prev_cc[num_nodes] = 0.5
nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
def test_zero_centrality(self):
G = nx.path_graph(3)
prev_cc = nx.closeness_centrality(G)
edge = self.pick_remove_edge(G)
test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insertion=False)
G.remove_edges_from([edge])
real_cc = nx.closeness_centrality(G)
shared_items = set(test_cc.items()) & set(real_cc.items())
assert len(shared_items) == len(real_cc)
assert 0 in test_cc.values()
def test_incremental(self):
# Check that incremental and regular give same output
G = self.undirected_G.copy()
prev_cc = None
for i in range(5):
if i % 2 == 0:
# Remove an edge
insert = False
edge = self.pick_remove_edge(G)
else:
# Add an edge
insert = True
edge = self.pick_add_edge(G)
# start = timeit.default_timer()
test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
# inc_elapsed = (timeit.default_timer() - start)
# print(f"incremental time: {inc_elapsed}")
if insert:
G.add_edges_from([edge])
else:
G.remove_edges_from([edge])
# start = timeit.default_timer()
real_cc = nx.closeness_centrality(G)
# reg_elapsed = (timeit.default_timer() - start)
# print(f"regular time: {reg_elapsed}")
# Example output:
# incremental time: 0.208
# regular time: 0.276
# incremental time: 0.00683
# regular time: 0.260
# incremental time: 0.0224
# regular time: 0.278
# incremental time: 0.00804
# regular time: 0.208
# incremental time: 0.00947
# regular time: 0.188
assert set(test_cc.items()) == set(real_cc.items())
prev_cc = test_cc
| SpaceGroupUCL/qgisSpaceSyntaxToolkit | esstoolkit/external/networkx/algorithms/centrality/tests/test_closeness_centrality.py | Python | gpl-3.0 | 10,220 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding unique constraint on 'Vendeur', fields ['code_permanent']
db.create_unique(u'encefal_vendeur', ['code_permanent'])
def backwards(self, orm):
# Removing unique constraint on 'Vendeur', fields ['code_permanent']
db.delete_unique(u'encefal_vendeur', ['code_permanent'])
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'encefal.exemplaire': {
'Meta': {'object_name': 'Exemplaire'},
'actif': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'date_creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modification': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'etat': ('django.db.models.fields.CharField', [], {'default': "'VENT'", 'max_length': '4'}),
'facture': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exemplaires'", 'null': 'True', 'db_column': "'facture'", 'to': u"orm['encefal.Facture']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'livre': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'exemplaires'", 'db_column': "'livre'", 'to': u"orm['encefal.Livre']"}),
'prix': ('django.db.models.fields.IntegerField', [], {}),
'vendeur': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'exemplaires'", 'db_column': "'vendeur'", 'to': u"orm['encefal.Vendeur']"})
},
u'encefal.facture': {
'Meta': {'object_name': 'Facture'},
'actif': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'date_creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modification': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'employe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'factures'", 'blank': 'True', 'db_column': "'employe'", 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'session': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'factures'", 'blank': 'True', 'db_column': "'session'", 'to': u"orm['encefal.Session']"})
},
u'encefal.livre': {
'Meta': {'object_name': 'Livre'},
'actif': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'auteur': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'date_creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modification': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'edition': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isbn': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '13', 'blank': 'True'}),
'titre': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'vendeur': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'livres'", 'symmetrical': 'False', 'through': u"orm['encefal.Exemplaire']", 'db_column': "'vendeur'", 'to': u"orm['encefal.Vendeur']"})
},
u'encefal.session': {
'Meta': {'object_name': 'Session'},
'actif': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'date_creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_debut': ('django.db.models.fields.DateField', [], {}),
'date_fin': ('django.db.models.fields.DateField', [], {}),
'date_modification': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nom': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
u'encefal.vendeur': {
'Meta': {'object_name': 'Vendeur'},
'actif': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'code_permanent': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '12'}),
'date_creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modification': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nom': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'prenom': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
}
}
complete_apps = ['encefal'] | nilovna/EnceFAL | project/encefal/migrations/0003_auto__add_unique_vendeur_code_permanent.py | Python | gpl-3.0 | 8,651 |
# $Id$
# Copyright 2013 Matthew Wall
# See the file LICENSE.txt for your full rights.
#
# Thanks to Eddie De Pieri for the first Python implementation for WS-28xx.
# Eddie did the difficult work of decompiling HeavyWeather then converting
# and reverse engineering into a functional Python implementation. Eddie's
# work was based on reverse engineering of HeavyWeather 2800 v 1.54
#
# Thanks to Lucas Heijst for enumerating the console message types and for
# debugging the transceiver/console communication timing issues.
"""Classes and functions for interfacing with WS-28xx weather stations.
LaCrosse makes a number of stations in the 28xx series, including:
WS-2810, WS-2810U-IT
WS-2811, WS-2811SAL-IT, WS-2811BRN-IT, WS-2811OAK-IT
WS-2812, WS-2812U-IT
WS-2813
WS-2814, WS-2814U-IT
WS-2815, WS-2815U-IT
C86234
The station is also sold as the TFA Primus, TFA Opus, and TechnoLine.
HeavyWeather is the software provided by LaCrosse.
There are two versions of HeavyWeather for the WS-28xx series: 1.5.4 and 1.5.4b
Apparently there is a difference between TX59UN-1-IT and TX59U-IT models (this
identifier is printed on the thermo-hygro sensor).
HeavyWeather Version Firmware Version Thermo-Hygro Model
1.54 333 or 332 TX59UN-1-IT
1.54b 288, 262, 222 TX59U-IT
HeavyWeather provides the following weather station settings:
time display: 12|24 hour
temperature display: C|F
air pressure display: inhg|hpa
wind speed display: m/s|knots|bft|km/h|mph
rain display: mm|inch
recording interval: 1m
keep weather station in hi-speed communication mode: true/false
According to the HeavyWeatherPro User Manual (1.54, rev2), "Hi speed mode wears
down batteries on your display much faster, and similarly consumes more power
on the PC. We do not believe most users need to enable this setting. It was
provided at the request of users who prefer ultra-frequent uploads."
The HeavyWeatherPro 'CurrentWeather' view is updated as data arrive from the
console. The console sends current weather data approximately every 13
seconds.
Historical data are updated less frequently - every 2 hours in the default
HeavyWeatherPro configuration.
According to the User Manual, "The 2800 series weather station uses the
'original' wind chill calculation rather than the 2001 'North American'
formula because the original formula is international."
Apparently the station console determines when data will be sent, and, once
paired, the transceiver is always listening. The station console sends a
broadcast on the hour. If the transceiver responds, the station console may
continue to broadcast data, depending on the transceiver response and its
timing.
According to the C86234 Operations Manual (Revision 7):
- Temperature and humidity data are sent to the console every 13 seconds.
- Wind data are sent to the temperature/humidity sensor every 17 seconds.
- Rain data are sent to the temperature/humidity sensor every 19 seconds.
- Air pressure is measured every 15 seconds.
Each tip of the rain bucket is 0.26 mm of rain.
The following information was obtained by logging messages from the ws28xx.py
driver in weewx and by capturing USB messages between Heavy Weather Pro for
ws2800 and the TFA Primus Weather Station via the Windows USB sniffer program
busdog64_v0.2.1.
Pairing
The transceiver must be paired with a console before it can receive data. Each
frame sent by the console includes the device identifier of the transceiver
with which it is paired.
Synchronizing
When the console and transceiver stop communicating, they can be synchronized
by one of the following methods:
- Push the SET button on the console
- Wait till the next full hour when the console sends a clock message
In each case a Request Time message is received by the transceiver from the
console. The 'Send Time to WS' message should be sent within milliseconds (10 ms
typical). The transceiver should handle the 'Time SET' message then send a
'Time/Config written' message about 85 ms after the 'Send Time to WS' message.
When complete, the console and transceiver will have been synchronized.
Timing
Current Weather messages, History messages, getConfig/setConfig messages, and
setTime messages each have their own timing. Missed History messages - as a
result of bad timing - result in console and transceiver becoming out of synch.
Current Weather
The console periodically sends Current Weather messages, each with the latest
values from the sensors. The CommModeInterval determines how often the console
will send Current Weather messages.
History
The console records data periodically at an interval defined by the
HistoryInterval parameter. The factory default setting is 2 hours.
Each history record contains a timestamp. Timestamps use the time from the
console clock. The console can record up to 1797 history records.
Reading 1795 history records took about 110 minutes on a raspberry pi, for
an average of 3.6 seconds per history record.
Reading 1795 history records took 65 minutes on a synology ds209+ii, for
an average of 2.2 seconds per history record.
Reading 1750 history records took 19 minutes using HeavyWeatherPro on a
Windows 7 64-bit laptop.
Message Types
The first byte of a message determines the message type.
ID Type Length
01 ? 0x0f (15)
d0 SetRX 0x15 (21)
d1 SetTX 0x15 (21)
d5 SetFrame 0x111 (273)
d6 GetFrame 0x111 (273)
d7 SetState 0x15 (21)
d8 SetPreamblePattern 0x15 (21)
d9 Execute 0x0f (15)
dc ReadConfigFlash< 0x15 (21)
dd ReadConfigFlash> 0x15 (21)
de GetState 0x0a (10)
f0 WriteReg 0x05 (5)
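For illustration only, dispatch on the first byte could look like the
following sketch (labels come from the table above; these names are not
part of any real driver API):
    MESSAGE_TYPES = {0xd0: 'SetRX', 0xd1: 'SetTX', 0xd5: 'SetFrame',
                     0xd6: 'GetFrame', 0xd7: 'SetState',
                     0xd8: 'SetPreamblePattern', 0xd9: 'Execute',
                     0xdc: 'ReadConfigFlash<', 0xdd: 'ReadConfigFlash>',
                     0xde: 'GetState', 0xf0: 'WriteReg'}
    def identify_message(buf):
        # buf is a raw transfer as a sequence of byte values
        return MESSAGE_TYPES.get(buf[0], 'unknown')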
In the following sections, some messages are decomposed using the following
structure:
start position in message buffer
hi-lo data starts on first (hi) or second (lo) nibble
chars data length in characters (nibbles)
rem remark
name variable
-------------------------------------------------------------------------------
1. 01 message (15 bytes)
000: 01 15 00 0b 08 58 3f 53 00 00 00 00 ff 15 0b (detected via USB sniffer)
000: 01 15 00 57 01 92 3f 53 00 00 00 00 ff 15 0a (detected via USB sniffer)
00: messageID
02-15: ??
-------------------------------------------------------------------------------
2. SetRX message (21 bytes)
000: d0 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00
020: 00
00: messageID
01-20: 00
-------------------------------------------------------------------------------
3. SetTX message (21 bytes)
000: d1 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00
020: 00
00: messageID
01-20: 00
-------------------------------------------------------------------------------
4. SetFrame message (273 bytes)
Action:
00: rtGetHistory - Ask for History message
01: rtSetTime - Ask for Send Time to weather station message
02: rtSetConfig - Ask for Send Config to weather station message
03: rtGetConfig - Ask for Config message
05: rtGetCurrent - Ask for Current Weather message
c0: Send Time - Send Time to WS
40: Send Config - Send Config to WS
000: d5 00 09 DevID 00 CfgCS cIntThisAdr xx xx xx rtGetHistory
000: d5 00 09 DevID 01 CfgCS cIntThisAdr xx xx xx rtReqSetTime
000: d5 00 09 f0 f0 02 CfgCS cIntThisAdr xx xx xx rtReqFirstConfig
000: d5 00 09 DevID 02 CfgCS cIntThisAdr xx xx xx rtReqSetConfig
000: d5 00 09 DevID 03 CfgCS cIntThisAdr xx xx xx rtGetConfig
000: d5 00 09 DevID 05 CfgCS cIntThisAdr xx xx xx rtGetCurrent
000: d5 00 0c DevID c0 CfgCS [TimeData . .. .. .. Send Time
000: d5 00 30 DevID 40 CfgCS [ConfigData .. .. .. Send Config
All SetFrame messages:
00: messageID
01: 00
02: Message Length (starting with next byte)
03-04: DeviceID [DevID]
05: Action
06-07: Config checksum [CfgCS]
Additional bytes rtGetCurrent, rtGetHistory, rtSetTime messages:
08-09hi: ComInt [cINT] 1.5 bytes (high byte first)
09lo-11: ThisHistoryAddress [ThisAdr] 2.5 bytes (high byte first)
Additional bytes Send Time message:
08: seconds
09: minutes
10: hours
11hi: DayOfWeek
11lo: day_lo (low byte)
12hi: month_lo (low byte)
12lo: day_hi (high byte)
13hi: (year-2000)_lo (low byte)
13lo: month_hi (high byte)
14lo: (year-2000)_hi (high byte)
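As a sketch of the byte packing for the rtGetCurrent request above (a
plausible reading of the documented nibble layout, not a verified driver;
the 12 bytes shown are the payload, which the USB frame pads to 273 bytes):
    def build_get_current_frame(dev_id, cfg_cs, com_int, this_adr):
        return bytearray([0xd5, 0x00, 0x09,
                          (dev_id >> 8) & 0xff, dev_id & 0xff,
                          0x05,                                # rtGetCurrent
                          (cfg_cs >> 8) & 0xff, cfg_cs & 0xff,
                          (com_int >> 4) & 0xff,               # ComInt, 1.5 bytes
                          ((com_int & 0x0f) << 4) | ((this_adr >> 16) & 0x0f),
                          (this_adr >> 8) & 0xff,              # ThisAdr, 2.5 bytes
                          this_adr & 0xff])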
-------------------------------------------------------------------------------
5. GetFrame message
Response type:
20: WS SetTime / SetConfig - Data written
40: GetConfig
60: Current Weather
80: Actual / Outstanding History
a1: Request First-Time Config
a2: Request SetConfig
a3: Request SetTime
000: 00 00 06 DevID 20 64 CfgCS xx xx xx xx xx xx xx xx xx Time/Config written
000: 00 00 30 DevID 40 64 [ConfigData .. .. .. .. .. .. .. GetConfig
000: 00 00 d7 DevID 60 64 CfgCS [CurData .. .. .. .. .. .. Current Weather
000: 00 00 1e DevID 80 64 CfgCS 0LateAdr 0ThisAdr [HisData Outstanding History
000: 00 00 1e DevID 80 64 CfgCS 0LateAdr 0ThisAdr [HisData Actual History
000: 00 00 06 DevID a1 64 CfgCS xx xx xx xx xx xx xx xx xx Request FirstConfig
000: 00 00 06 DevID a2 64 CfgCS xx xx xx xx xx xx xx xx xx Request SetConfig
000: 00 00 06 DevID a3 64 CfgCS xx xx xx xx xx xx xx xx xx Request SetTime
ReadConfig example:
000: 01 2e 40 5f 36 53 02 00 00 00 00 81 00 04 10 00 82 00 04 20
020: 00 71 41 72 42 00 05 00 00 00 27 10 00 02 83 60 96 01 03 07
040: 21 04 01 00 00 00 CfgCS
WriteConfig example:
000: 01 2e 40 64 36 53 02 00 00 00 00 00 10 04 00 81 00 20 04 00
020: 82 41 71 42 72 00 00 05 00 00 00 10 27 01 96 60 83 02 01 04
040: 21 07 03 10 00 00 CfgCS
00: messageID
01: 00
02: Message Length (starting with next byte)
03-04: DeviceID [devID]
05hi: responseType
06: Quality (in steps of 5)
Additional byte GetFrame messages except Request SetConfig and Request SetTime:
05lo: BatteryStat 8=WS bat low; 4=TMP bat low; 2=RAIN bat low; 1=WIND bat low
Additional byte Request SetConfig and Request SetTime:
05lo: RequestID
Additional bytes all GetFrame messages except ReadConfig and WriteConfig
07-08: Config checksum [CfgCS]
Additional bytes Outstanding History:
09lo-11: LatestHistoryAddress [LateAdr] 2.5 bytes (Latest to sent)
12lo-14: ThisHistoryAddress [ThisAdr] 2.5 bytes (Outstanding)
Additional bytes Actual History:
09lo-11: LatestHistoryAddress [ThisAdr] 2.5 bytes (LatestHistoryAddress is the)
12lo-14: ThisHistoryAddress [ThisAdr] 2.5 bytes (same as ThisHistoryAddress)
Additional bytes ReadConfig and WriteConfig
43-45: ResetMinMaxFlags (Output only; not included in checksum calculation)
46-47: Config checksum [CfgCS] (CheckSum = sum of bytes (00-42) + 7)
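A minimal sketch of the checksum and battery-flag arithmetic described
above (assuming CfgCS is a 16-bit sum, which matches its 2-byte field):
    def config_checksum(cfg):
        # CheckSum = sum of bytes 00-42, plus 7
        return (sum(cfg[0:43]) + 7) & 0xffff
    def battery_low_flags(nibble):
        # 05lo: 8=WS bat low; 4=TMP bat low; 2=RAIN bat low; 1=WIND bat low
        return {'ws': bool(nibble & 8), 'tmp': bool(nibble & 4),
                'rain': bool(nibble & 2), 'wind': bool(nibble & 1)}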
-------------------------------------------------------------------------------
6. SetState message
000: d7 00 00 00 00 00 00 00 00 00 00 00 00 00 00
00: messageID
01-14: 00
-------------------------------------------------------------------------------
7. SetPreamblePattern message
000: d8 aa 00 00 00 00 00 00 00 00 00 00 00 00 00
00: messageID
01: ??
02-14: 00
-------------------------------------------------------------------------------
8. Execute message
000: d9 05 00 00 00 00 00 00 00 00 00 00 00 00 00
00: messageID
01: ??
02-14: 00
-------------------------------------------------------------------------------
9. ReadConfigFlash in - receive data
000: dc 0a 01 f5 00 01 78 a0 01 02 0a 0c 0c 01 2e ff ff ff ff ff - freq correction
000: dc 0a 01 f9 01 02 0a 0c 0c 01 2e ff ff ff ff ff ff ff ff ff - transceiver data
00: messageID
01: length
02-03: address
Additional bytes frequency correction
05lo-07hi: frequency correction
Additional bytes transceiver data
05-10: serial number
09-10: DeviceID [devID]
-------------------------------------------------------------------------------
10. ReadConfigFlash out - ask for data
000: dd 0a 01 f5 cc cc cc cc cc cc cc cc cc cc cc - Ask for freq correction
000: dd 0a 01 f9 cc cc cc cc cc cc cc cc cc cc cc - Ask for transceiver data
00: messageID
01: length
02-03: address
04-14: cc
-------------------------------------------------------------------------------
11. GetState message
000: de 14 00 00 00 00 (between SetPreamblePattern and first de16 message)
000: de 15 00 00 00 00 Idle message
000: de 16 00 00 00 00 Normal message
000: de 0b 00 00 00 00 (detected via USB sniffer)
00: messageID
01: stateID
02-05: 00
-------------------------------------------------------------------------------
12. Writereg message
000: f0 08 01 00 00 - AX5051RegisterNames.IFMODE
000: f0 10 01 41 00 - AX5051RegisterNames.MODULATION
000: f0 11 01 07 00 - AX5051RegisterNames.ENCODING
...
000: f0 7b 01 88 00 - AX5051RegisterNames.TXRATEMID
000: f0 7c 01 23 00 - AX5051RegisterNames.TXRATELO
000: f0 7d 01 35 00 - AX5051RegisterNames.TXDRIVER
00: messageID
01: register address
02: 01
03: AX5051RegisterName
04: 00
-------------------------------------------------------------------------------
13. Current Weather message
start hi-lo chars rem name
0 hi 4 DevID
2 hi 2 Action
3 hi 2 Quality
4 hi 4 DeviceCS
6 hi 4 6 _AlarmRingingFlags
8 hi 1 _WeatherTendency
8 lo 1 _WeatherState
9 hi 1 not used
9 lo 10 _TempIndoorMinMax._Max._Time
14 lo 10 _TempIndoorMinMax._Min._Time
19 lo 5 _TempIndoorMinMax._Max._Value
22 hi 5 _TempIndoorMinMax._Min._Value
24 lo 5 _TempIndoor (C)
27 lo 10 _TempOutdoorMinMax._Max._Time
32 lo 10 _TempOutdoorMinMax._Min._Time
37 lo 5 _TempOutdoorMinMax._Max._Value
40 hi 5 _TempOutdoorMinMax._Min._Value
42 lo 5 _TempOutdoor (C)
45 hi 1 not used
45 lo 10 1 _WindchillMinMax._Max._Time
50 lo 10 2 _WindchillMinMax._Min._Time
55 lo 5 1 _WindchillMinMax._Max._Value
57 hi 5 1 _WindchillMinMax._Min._Value
60 lo 6 _Windchill (C)
63 hi 1 not used
63 lo 10 _DewpointMinMax._Max._Time
68 lo 10 _DewpointMinMax._Min._Time
73 lo 5 _DewpointMinMax._Max._Value
76 hi 5 _DewpointMinMax._Min._Value
78 lo 5 _Dewpoint (C)
81 hi 10 _HumidityIndoorMinMax._Max._Time
86 hi 10 _HumidityIndoorMinMax._Min._Time
91 hi 2 _HumidityIndoorMinMax._Max._Value
92 hi 2 _HumidityIndoorMinMax._Min._Value
93 hi 2 _HumidityIndoor (%)
94 hi 10 _HumidityOutdoorMinMax._Max._Time
99 hi 10 _HumidityOutdoorMinMax._Min._Time
104 hi 2 _HumidityOutdoorMinMax._Max._Value
105 hi 2 _HumidityOutdoorMinMax._Min._Value
106 hi 2 _HumidityOutdoor (%)
107 hi 10 3 _RainLastMonthMax._Time
112 hi 6 3 _RainLastMonthMax._Max._Value
115 hi 6 _RainLastMonth (mm)
118 hi 10 3 _RainLastWeekMax._Time
123 hi 6 3 _RainLastWeekMax._Max._Value
126 hi 6 _RainLastWeek (mm)
129 hi 10 _Rain24HMax._Time
134 hi 6 _Rain24HMax._Max._Value
137 hi 6 _Rain24H (mm)
140 hi 10 _Rain1HMax._Time
145 hi 6 _Rain1HMax._Max._Value
148 hi 6 _Rain1H (mm)
151 hi 1 not used
152 lo 10 _LastRainReset
158 lo 7 _RainTotal (mm)
160 hi 1 _WindDirection5
160 lo 1 _WindDirection4
161 hi 1 _WindDirection3
161 lo 1 _WindDirection2
162 hi 1 _WindDirection1
162 lo 1 _WindDirection (0-15)
163 hi 18 unknown data
172 hi 6 _WindSpeed (km/h)
175 hi 1 _GustDirection5
175 lo 1 _GustDirection4
176 hi 1 _GustDirection3
176 lo 1 _GustDirection2
177 hi 1 _GustDirection1
177 lo 1 _GustDirection (0-15)
178 hi 2 not used
179 hi 10 _GustMax._Max._Time
184 hi 6 _GustMax._Max._Value
187 hi 6 _Gust (km/h)
190 hi 10 4 _PressureRelative_MinMax._Max/Min._Time
195 hi 5 5 _PressureRelative_inHgMinMax._Max._Value
197 lo 5 5 _PressureRelative_hPaMinMax._Max._Value
200 hi 5 _PressureRelative_inHgMinMax._Max._Value
202 lo 5 _PressureRelative_hPaMinMax._Max._Value
205 hi 5 _PressureRelative_inHgMinMax._Min._Value
207 lo 5 _PressureRelative_hPaMinMax._Min._Value
210 hi 5 _PressureRelative_inHg
212 lo 5 _PressureRelative_hPa
214 lo 430 end
Remarks
1 since factory reset
2 since software reset
3 not used?
4 should be: _PressureRelative_MinMax._Max._Time
5 should be: _PressureRelative_MinMax._Min._Time
6 _AlarmRingingFlags (values in hex)
80 00 = Hi Al Gust
40 00 = Al WindDir
20 00 = One or more WindDirs set
10 00 = Hi Al Rain24H
08 00 = Hi Al Outdoor Humidity
04 00 = Lo Al Outdoor Humidity
02 00 = Hi Al Indoor Humidity
01 00 = Lo Al Indoor Humidity
00 80 = Hi Al Outdoor Temp
00 40 = Lo Al Outdoor Temp
00 20 = Hi Al Indoor Temp
00 10 = Lo Al Indoor Temp
00 08 = Hi Al Pressure
00 04 = Lo Al Pressure
00 02 = not used
00 01 = not used
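
The start/hi-lo/chars columns address nibbles: 'start' is the byte offset,
hi/lo selects the first nibble, and 'chars' is the number of nibbles in the
field. A hypothetical helper illustrating the addressing (the driver's
USBHardware.toXxx readers below do this per field type):

    def read_nibbles(buf, start, hi, count):
        # collect 'count' nibbles beginning at the hi or lo nibble of buf[start]
        pos = start * 2 + (0 if hi else 1)
        return [(buf[(pos + i) // 2] >> 4) if (pos + i) % 2 == 0
                else (buf[(pos + i) // 2] & 0xF) for i in xrange(count)]
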
-------------------------------------------------------------------------------
14. History Message
start hi-lo chars rem name
0 hi 4 DevID
2 hi 2 Action
3 hi 2 Quality (%)
4 hi 4 DeviceCS
6 hi 6 LatestAddress
9 hi 6 ThisAddress
12 hi 1 not used
12 lo 3 Gust (m/s)
14 hi 1 WindDirection (0-15, also GustDirection)
14 lo 3 WindSpeed (m/s)
16 hi 3 RainCounterRaw (total in period in 0.1 inch)
17 lo 2 HumidityOutdoor (%)
18 lo 2 HumidityIndoor (%)
19 lo 5 PressureRelative (hPa)
22 hi 3 TempOutdoor (C)
23 lo 3 TempIndoor (C)
25 hi 10 Time
29 lo 60 end
-------------------------------------------------------------------------------
15. Set Config Message
start hi-lo chars rem name
0 hi 4 DevID
2 hi 2 Action
3 hi 2 Quality
4 hi 1 1 _WindspeedFormat
4 lo 0.25 2 _RainFormat
4 lo 0.25 3 _PressureFormat
4 lo 0.25 4 _TemperatureFormat
4 lo 0.25 5 _ClockMode
5 hi 1 _WeatherThreshold
5 lo 1 _StormThreshold
6 hi 1 _LowBatFlags
6 lo 1 6 _LCDContrast
7 hi 4 7 _WindDirAlarmFlags (reverse group 1)
9 hi 4 8 _OtherAlarmFlags (reverse group 1)
11 hi 10 _TempIndoorMinMax._Min._Value (reverse group 2)
_TempIndoorMinMax._Max._Value (reverse group 2)
16 hi 10 _TempOutdoorMinMax._Min._Value (reverse group 3)
_TempOutdoorMinMax._Max._Value (reverse group 3)
21 hi 2 _HumidityIndoorMinMax._Min._Value
22 hi 2 _HumidityIndoorMinMax._Max._Value
23 hi 2 _HumidityOutdoorMinMax._Min._Value
24 hi 2 _HumidityOutdoorMinMax._Max._Value
25 hi 1 not used
25 lo 7 _Rain24HMax._Max._Value (reverse bytes)
29 hi 2 _HistoryInterval
30 hi 1 not used
30 lo 5 _GustMax._Max._Value (reverse bytes)
33 hi 10 _PressureRelative_hPaMinMax._Min._Value (rev grp4)
_PressureRelative_inHgMinMax._Min._Value(rev grp4)
38 hi 10 _PressureRelative_hPaMinMax._Max._Value (rev grp5)
_PressureRelative_inHgMinMax._Max._Value(rev grp5)
43 hi 6 9 _ResetMinMaxFlags
46 hi 4 10 _InBufCS
47 lo 96 end
Remarks
1 0=m/s 1=knots 2=bft 3=km/h 4=mph
2 0=mm 1=inch
3 0=inHg 1=hPa
4 0=F 1=C
5 0=24h 1=12h
6 values 0-7 => LCD contrast 1-8
7 WindDir Alarms (not-reversed values in hex)
80 00 = NNW
40 00 = NW
20 00 = WNW
10 00 = W
08 00 = WSW
04 00 = SW
02 00 = SSW
01 00 = S
00 80 = SSE
00 40 = SE
00 20 = ESE
00 10 = E
00 08 = ENE
00 04 = NE
00 02 = NNE
00 01 = N
8 Other Alarms (not-reversed values in hex)
80 00 = Hi Al Gust
40 00 = Al WindDir
20 00 = One or more WindDirs set
10 00 = Hi Al Rain24H
08 00 = Hi Al Outdoor Humidity
04 00 = Lo Al Outdoor Humidity
02 00 = Hi Al Indoor Humidity
01 00 = Lo Al Indoor Humidity
00 80 = Hi Al Outdoor Temp
00 40 = Lo Al Outdoor Temp
00 20 = Hi Al Indoor Temp
00 10 = Lo Al Indoor Temp
00 08 = Hi Al Pressure
00 04 = Lo Al Pressure
00 02 = not used
00 01 = not used
9 ResetMinMaxFlags (not-reversed values in hex)
"Output only; not included in checksum calc"
80 00 00 = Reset DewpointMax
40 00 00 = Reset DewpointMin
20 00 00 = not used
10 00 00 = Reset WindchillMin*
"*Reset dateTime only; Min._Value is preserved"
08 00 00 = Reset TempOutMax
04 00 00 = Reset TempOutMin
02 00 00 = Reset TempInMax
01 00 00 = Reset TempInMin
00 80 00 = Reset Gust
00 40 00 = not used
00 20 00 = not used
00 10 00 = not used
00 08 00 = Reset HumOutMax
00 04 00 = Reset HumOutMin
00 02 00 = Reset HumInMax
00 01 00 = Reset HumInMin
00 00 80 = not used
00 00 40 = Reset Rain Total
00 00 20 = Reset last month?
00 00 10 = Reset last week?
00 00 08 = Reset Rain24H
00 00 04 = Reset Rain1H
00 00 02 = Reset PresRelMax
00 00 01 = Reset PresRelMin
10 Checksum = sum bytes (0-42) + 7
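
Fields marked '(reverse ...)' are transmitted with their bytes in reverse
order; a minimal sketch of undoing that (the driver's
USBHardware.reverseByteOrder below does the same in place):

    def reversed_bytes(buf, start, count):
        # hypothetical helper: the 'count' bytes at 'start', reversed
        return buf[start:start + count][::-1]
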
-------------------------------------------------------------------------------
16. Get Config Message
start hi-lo chars rem name
0 hi 4 DevID
2 hi 2 Action
3 hi 2 Quality
4 hi 1 1 _WindspeedFormat
4 lo 0.25 2 _RainFormat
4 lo 0.25 3 _PressureFormat
4 lo 0.25 4 _TemperatureFormat
4 lo 0.25 5 _ClockMode
5 hi 1 _WeatherThreshold
5 lo 1 _StormThreshold
6 hi 1 _LowBatFlags
6 lo 1 6 _LCDContrast
7 hi 4 7 _WindDirAlarmFlags
9 hi 4 8 _OtherAlarmFlags
11 hi 5 _TempIndoorMinMax._Min._Value
13 lo 5 _TempIndoorMinMax._Max._Value
16 hi 5 _TempOutdoorMinMax._Min._Value
18 lo 5 _TempOutdoorMinMax._Max._Value
21 hi 2 _HumidityIndoorMinMax._Max._Value
22 hi 2 _HumidityIndoorMinMax._Min._Value
23 hi 2 _HumidityOutdoorMinMax._Max._Value
24 hi 2 _HumidityOutdoorMinMax._Min._Value
25 hi 1 not used
25 lo 7 _Rain24HMax._Max._Value
29 hi 2 _HistoryInterval
30 hi 5 _GustMax._Max._Value
32 lo 1 not used
33 hi 5 _PressureRelative_hPaMinMax._Min._Value
35 lo 5 _PressureRelative_inHgMinMax._Min._Value
38 hi 5 _PressureRelative_hPaMinMax._Max._Value
40 lo 5 _PressureRelative_inHgMinMax._Max._Value
43 hi 6 9 _ResetMinMaxFlags
46 hi 4 10 _InBufCS
47 lo 96 end
Remarks
1 0=m/s 1=knots 2=bft 3=km/h 4=mph
2 0=mm 1=inch
3 0=inHg 1=hPa
4 0=F 1=C
5 0=24h 1=12h
6 values 0-7 => LCD contrast 1-8
7 WindDir Alarms (values in hex)
80 00 = NNW
40 00 = NW
20 00 = WNW
10 00 = W
08 00 = WSW
04 00 = SW
02 00 = SSW
01 00 = S
00 80 = SSE
00 40 = SE
00 20 = ESE
00 10 = E
00 08 = ENE
00 04 = NE
00 02 = NNE
00 01 = N
8 Other Alarms (values in hex)
80 00 = Hi Al Gust
40 00 = Al WindDir
20 00 = One or more WindDirs set
10 00 = Hi Al Rain24H
08 00 = Hi Al Outdoor Humidity
04 00 = Lo Al Outdoor Humidity
02 00 = Hi Al Indoor Humidity
01 00 = Lo Al Indoor Humidity
00 80 = Hi Al Outdoor Temp
00 40 = Lo Al Outdoor Temp
00 20 = Hi Al Indoor Temp
00 10 = Lo Al Indoor Temp
00 08 = Hi Al Pressure
00 04 = Lo Al Pressure
00 02 = not used
00 01 = not used
9 ResetMinMaxFlags (values in hex)
"Output only; input = 00 00 00"
10 Checksum = sum bytes (0-42) + 7
-------------------------------------------------------------------------------
Examples of messages
readCurrentWeather
Cur 000: 01 2e 60 5f 05 1b 00 00 12 01 30 62 21 54 41 30 62 40 75 36
Cur 020: 59 00 60 70 06 35 00 01 30 62 31 61 21 30 62 30 55 95 92 00
Cur 040: 53 10 05 37 00 01 30 62 01 90 81 30 62 40 90 66 38 00 49 00
Cur 060: 05 37 00 01 30 62 21 53 01 30 62 22 31 75 51 11 50 40 05 13
Cur 080: 80 13 06 22 21 40 13 06 23 19 37 67 52 59 13 06 23 06 09 13
Cur 100: 06 23 16 19 91 65 86 00 00 00 00 00 00 00 00 00 00 00 00 00
Cur 120: 00 00 00 00 00 00 00 00 00 13 06 23 09 59 00 06 19 00 00 51
Cur 140: 13 06 22 20 43 00 01 54 00 00 00 01 30 62 21 51 00 00 38 70
Cur 160: a7 cc 7b 50 09 01 01 00 00 00 00 00 00 fc 00 a7 cc 7b 14 13
Cur 180: 06 23 14 06 0e a0 00 01 b0 00 13 06 23 06 34 03 00 91 01 92
Cur 200: 03 00 91 01 92 02 97 41 00 74 03 00 91 01 92
WeatherState: Sunny(Good) WeatherTendency: Rising(Up) AlarmRingingFlags: 0000
TempIndoor 23.500 Min:20.700 2013-06-24 07:53 Max:25.900 2013-06-22 15:44
HumidityIndoor 59.000 Min:52.000 2013-06-23 19:37 Max:67.000 2013-06-22 21:40
TempOutdoor 13.700 Min:13.100 2013-06-23 05:59 Max:19.200 2013-06-23 16:12
HumidityOutdoor 86.000 Min:65.000 2013-06-23 16:19 Max:91.000 2013-06-23 06:09
Windchill 13.700 Min: 9.000 2013-06-24 09:06 Max:23.800 2013-06-20 19:08
Dewpoint 11.380 Min:10.400 2013-06-22 23:17 Max:15.111 2013-06-22 15:30
WindSpeed 2.520
Gust 4.320 Max:37.440 2013-06-23 14:06
WindDirection WSW GustDirection WSW
WindDirection1 SSE GustDirection1 SSE
WindDirection2 W GustDirection2 W
WindDirection3 W GustDirection3 W
WindDirection4 SSE GustDirection4 SSE
WindDirection5 SW GustDirection5 SW
RainLastMonth 0.000 Max: 0.000 1900-01-01 00:00
RainLastWeek 0.000 Max: 0.000 1900-01-01 00:00
Rain24H 0.510 Max: 6.190 2013-06-23 09:59
Rain1H 0.000 Max: 1.540 2013-06-22 20:43
RainTotal 3.870 LastRainReset 2013-06-22 15:10
PresRelhPa 1019.200 Min:1007.400 2013-06-23 06:34 Max:1019.200 2013-06-23 06:34
PresRel_inHg 30.090 Min: 29.740 2013-06-23 06:34 Max: 30.090 2013-06-23 06:34
Bytes with unknown meaning at 157-165: 50 09 01 01 00 00 00 00 00
-------------------------------------------------------------------------------
readHistory
His 000: 01 2e 80 5f 05 1b 00 7b 32 00 7b 32 00 0c 70 0a 00 08 65 91
His 020: 01 92 53 76 35 13 06 24 09 10
Time 2013-06-24 09:10:00
TempIndoor= 23.5
HumidityIndoor= 59
TempOutdoor= 13.7
HumidityOutdoor= 86
PressureRelative= 1019.2
RainCounterRaw= 0.0
WindDirection= SSE
WindSpeed= 1.0
Gust= 1.2
-------------------------------------------------------------------------------
readConfig
In 000: 01 2e 40 5f 36 53 02 00 00 00 00 81 00 04 10 00 82 00 04 20
In 020: 00 71 41 72 42 00 05 00 00 00 27 10 00 02 83 60 96 01 03 07
In 040: 21 04 01 00 00 00 05 1b
-------------------------------------------------------------------------------
writeConfig
Out 000: 01 2e 40 64 36 53 02 00 00 00 00 00 10 04 00 81 00 20 04 00
Out 020: 82 41 71 42 72 00 00 05 00 00 00 10 27 01 96 60 83 02 01 04
Out 040: 21 07 03 10 00 00 05 1b
OutBufCS= 051b
ClockMode= 0
TemperatureFormat= 1
PressureFormat= 1
RainFormat= 0
WindspeedFormat= 3
WeatherThreshold= 3
StormThreshold= 5
LCDContrast= 2
LowBatFlags= 0
WindDirAlarmFlags= 0000
OtherAlarmFlags= 0000
HistoryInterval= 0
TempIndoor_Min= 1.0
TempIndoor_Max= 41.0
TempOutdoor_Min= 2.0
TempOutdoor_Max= 42.0
HumidityIndoor_Min= 41
HumidityIndoor_Max= 71
HumidityOutdoor_Min= 42
HumidityOutdoor_Max= 72
Rain24HMax= 50.0
GustMax= 100.0
PressureRel_hPa_Min= 960.1
PressureRel_inHg_Min= 28.36
PressureRel_hPa_Max= 1040.1
PressureRel_inHg_Max= 30.72
ResetMinMaxFlags= 100000 (Output only; Input always 00 00 00)
-------------------------------------------------------------------------------
class EHistoryInterval:
Constant Value Message received at
hi01Min = 0 00:00, 00:01, 00:02, 00:03 ... 23:59
hi05Min = 1 00:00, 00:05, 00:10, 00:15 ... 23:55
hi10Min = 2 00:00, 00:10, 00:20, 00:30 ... 23:50
hi15Min = 3 00:00, 00:15, 00:30, 00:45 ... 23:45
hi20Min = 4 00:00, 00:20, 00:40, 01:00 ... 23:40
hi30Min = 5 00:00, 00:30, 01:00, 01:30 ... 23:30
hi60Min = 6 00:00, 01:00, 02:00, 03:00 ... 23:00
hi02Std = 7 00:00, 02:00, 04:00, 06:00 ... 22:00
hi04Std = 8 00:00, 04:00, 08:00, 12:00 ... 20:00
hi06Std = 9 00:00, 06:00, 12:00, 18:00
hi08Std = 0xA 00:00, 08:00, 16:00
hi12Std = 0xB 00:00, 12:00
hi24Std = 0xC 00:00
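
A sketch of when the next history message is due, in minutes after
midnight, for a given interval code (next_history_time is hypothetical;
history_intervals is the code-to-minutes map defined in the code below):

    def next_history_time(now_min, code):
        step = history_intervals[code]
        return ((now_min // step) + 1) * step % 1440
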
-------------------------------------------------------------------------------
WS SetTime - Send time to WS
Time 000: 01 2e c0 05 1b 19 14 12 40 62 30 01
time sent: 2013-06-24 12:14:19
-------------------------------------------------------------------------------
ReadConfigFlash data
Ask for frequency correction
rcfo 000: dd 0a 01 f5 cc cc cc cc cc cc cc cc cc cc cc
readConfigFlash frequency correction
rcfi 000: dc 0a 01 f5 00 01 78 a0 01 02 0a 0c 0c 01 2e ff ff ff ff ff
frequency correction: 96416 (0x178a0)
adjusted frequency: 910574957 (3646456d)
Ask for transceiver data
rcfo 000: dd 0a 01 f9 cc cc cc cc cc cc cc cc cc cc cc
readConfigFlash serial number and DevID
rcfi 000: dc 0a 01 f9 01 02 0a 0c 0c 01 2e ff ff ff ff ff ff ff ff ff
transceiver ID: 302 (0x012e)
transceiver serial: 01021012120146
Program Logic
The RF communication thread uses the following logic to communicate with the
weather station console:
Step 1. Perform getState commands in a while loop until state 0xde16
        is received.
Step 2. Perform a getFrame command to read the message data.
Step 3. Handle the contents of the message. The type of message depends on
the response type:
Response type (hex):
20: WS SetTime / SetConfig - Data written
confirmation the setTime/setConfig setFrame message has been received
by the console
40: GetConfig
save the contents of the configuration for later use (i.e. a setConfig
    message with one or more parameters changed)
60: Current Weather
handle the weather data of the current weather message
80: Actual / Outstanding History
ignore the data of the actual history record when there is no data gap;
handle the data of a (one) requested history record (note: in step 4 we
can decide to request another history record).
a1: Request First-Time Config
prepare a setFrame first time message
a2: Request SetConfig
prepare a setFrame setConfig message
a3: Request SetTime
prepare a setFrame setTime message
Step 4. If the message received in step 3 is not the one you asked for (see
        step 5 for how to request a certain type of message), decide whether
        to ignore or handle it. Then go to step 5 to request the next type
        of message, unless the received message has response type a1, a2 or
        a3; in that case first prepare the setFrame message the wireless
        console asked for.
Step 5. Decide what kind of message you want to receive next time. The
        request is done via a setFrame message (see step 6). It is not
        guaranteed that you will receive that kind of message the next
        time, but setting the proper timing parameters firstSleep and
        nextSleep increases the chance that you will get the requested
        type of message.
Step 6. The action parameter in the setFrame message sets the type of the
        next message to receive.
Action (hex):
00: rtGetHistory - Ask for History message
setSleep(0.300,0.010)
01: rtSetTime - Ask for Send Time to weather station message
setSleep(0.085,0.005)
02: rtSetConfig - Ask for Send Config to weather station message
setSleep(0.300,0.010)
03: rtGetConfig - Ask for Config message
setSleep(0.400,0.400)
05: rtGetCurrent - Ask for Current Weather message
setSleep(0.300,0.010)
c0: Send Time - Send Time to WS
setSleep(0.085,0.005)
40: Send Config - Send Config to WS
setSleep(0.085,0.005)
Note: after the Request First-Time Config message (response type = 0xa1)
perform a rtGetConfig with setSleep(0.085,0.005)
Step 7. Perform a setTX command
Step 8. Go to step 1 to wait for state 0xde16 again.
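
A minimal sketch of steps 1-8 (function names are hypothetical; the
response type is the Action byte at offset 2 of the received frame, see
the message layouts above):

    while True:
        if get_state() != 0xde16:            # step 1
            continue
        frame = get_frame()                  # step 2
        rtype = frame[2] & 0xf0              # step 3
        if rtype == 0x60:
            handle_current_weather(frame)
        elif rtype == 0x80:
            handle_history(frame)
        elif rtype == 0xa0:
            prepare_requested_frame(frame)   # a1/a2/a3
        action = decide_next_request()       # steps 4 and 5
        set_frame(action)                    # step 6
        set_tx()                             # step 7, then back to step 1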
"""
# TODO: how often is currdat.lst modified with/without hi-speed mode?
# TODO: thread locking around observation data
# TODO: eliminate polling, make MainThread get data as soon as RFThread updates
# TODO: get rid of Length/Buffer construct, replace with a Buffer class or obj
# FIXME: the history retrieval assumes a constant archive interval across all
# history records. this means anything that modifies the archive
# interval should clear the history.
from datetime import datetime
import StringIO
import sys
import syslog
import threading
import time
import traceback
import usb
import weewx.drivers
import weewx.wxformulas
import weeutil.weeutil
DRIVER_NAME = 'WS28xx'
DRIVER_VERSION = '0.33'
def loader(config_dict, engine):
return WS28xxDriver(**config_dict[DRIVER_NAME])
def configurator_loader(config_dict):
return WS28xxConfigurator()
def confeditor_loader():
return WS28xxConfEditor()
# flags for enabling/disabling debug verbosity
DEBUG_COMM = 0
DEBUG_CONFIG_DATA = 0
DEBUG_WEATHER_DATA = 0
DEBUG_HISTORY_DATA = 0
DEBUG_DUMP_FORMAT = 'auto'
def logmsg(dst, msg):
syslog.syslog(dst, 'ws28xx: %s: %s' %
(threading.currentThread().getName(), msg))
def logdbg(msg):
logmsg(syslog.LOG_DEBUG, msg)
def loginf(msg):
logmsg(syslog.LOG_INFO, msg)
def logcrt(msg):
logmsg(syslog.LOG_CRIT, msg)
def logerr(msg):
logmsg(syslog.LOG_ERR, msg)
def log_traceback(dst=syslog.LOG_INFO, prefix='**** '):
sfd = StringIO.StringIO()
traceback.print_exc(file=sfd)
sfd.seek(0)
for line in sfd:
logmsg(dst, prefix + line)
del sfd
def log_frame(n, buf):
logdbg('frame length is %d' % n)
strbuf = ''
for i in xrange(0,n):
strbuf += str('%02x ' % buf[i])
if (i + 1) % 16 == 0:
logdbg(strbuf)
strbuf = ''
if strbuf:
logdbg(strbuf)
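# sentinel filters: return None when v matches the 'not present' (np) or
# 'outside factory limits' (ofl) sentinel, otherwise return v; the _diff
# variant tolerates floating point rounding, _match requires equality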
def get_datum_diff(v, np, ofl):
if abs(np - v) < 0.001 or abs(ofl - v) < 0.001:
return None
return v
def get_datum_match(v, np, ofl):
if np == v or ofl == v:
return None
return v
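# sum 'end' bytes of buf[0] starting at offset 'start' (default: the rest
# of the buffer); this is the byte-sum building block for the message
# checksums described in the protocol notes above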
def calc_checksum(buf, start, end=None):
if end is None:
end = len(buf[0]) - start
cs = 0
for i in xrange(0, end):
cs += buf[0][i+start]
return cs
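# history record indices wrap around at WS28xxDriver.max_records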
def get_next_index(idx):
return get_index(idx + 1)
def get_index(idx):
if idx < 0:
return idx + WS28xxDriver.max_records
elif idx >= WS28xxDriver.max_records:
return idx - WS28xxDriver.max_records
return idx
def tstr_to_ts(tstr):
try:
return int(time.mktime(time.strptime(tstr, "%Y-%m-%d %H:%M:%S")))
except (OverflowError, ValueError, TypeError):
pass
return None
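# history records are stored in 18-byte slots starting at address 416
# (0x1a0); a record address is assembled from 2.5 bytes (the low nibble
# of a, then b and c)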
def bytes_to_addr(a, b, c):
return ((((a & 0xF) << 8) | b) << 8) | c
def addr_to_index(addr):
return (addr - 416) / 18
def index_to_addr(idx):
return 18 * idx + 416
def print_dict(data):
for x in sorted(data.keys()):
if x == 'dateTime':
print '%s: %s' % (x, weeutil.weeutil.timestamp_to_string(data[x]))
else:
print '%s: %s' % (x, data[x])
class WS28xxConfEditor(weewx.drivers.AbstractConfEditor):
@property
def default_stanza(self):
return """
[WS28xx]
# This section is for the La Crosse WS-2800 series of weather stations.
# Radio frequency to use between USB transceiver and console: US or EU
# US uses 915 MHz, EU uses 868.3 MHz. Default is US.
transceiver_frequency = US
# The station model, e.g., 'LaCrosse C86234' or 'TFA Primus'
model = LaCrosse WS28xx
# The driver to use:
driver = weewx.drivers.ws28xx
"""
def prompt_for_settings(self):
print "Specify the frequency used between the station and the"
print "transceiver, either 'US' (915 MHz) or 'EU' (868.3 MHz)."
freq = self._prompt('frequency', 'US', ['US', 'EU'])
return {'transceiver_frequency': freq}
class WS28xxConfigurator(weewx.drivers.AbstractConfigurator):
def add_options(self, parser):
super(WS28xxConfigurator, self).add_options(parser)
parser.add_option("--check-transceiver", dest="check",
action="store_true",
help="check USB transceiver")
parser.add_option("--pair", dest="pair", action="store_true",
help="pair the USB transceiver with station console")
parser.add_option("--info", dest="info", action="store_true",
help="display weather station configuration")
parser.add_option("--set-interval", dest="interval",
type=int, metavar="N",
help="set logging interval to N minutes")
parser.add_option("--current", dest="current", action="store_true",
help="get the current weather conditions")
parser.add_option("--history", dest="nrecords", type=int, metavar="N",
help="display N history records")
parser.add_option("--history-since", dest="recmin",
type=int, metavar="N",
help="display history records since N minutes ago")
parser.add_option("--maxtries", dest="maxtries", type=int,
help="maximum number of retries, 0 indicates no max")
def do_options(self, options, parser, config_dict, prompt):
maxtries = 3 if options.maxtries is None else int(options.maxtries)
self.station = WS28xxDriver(**config_dict[DRIVER_NAME])
if options.check:
self.check_transceiver(maxtries)
elif options.pair:
self.pair(maxtries)
elif options.interval is not None:
self.set_interval(maxtries, options.interval, prompt)
elif options.current:
self.show_current(maxtries)
elif options.nrecords is not None:
self.show_history(maxtries, count=options.nrecords)
elif options.recmin is not None:
ts = int(time.time()) - options.recmin * 60
self.show_history(maxtries, ts=ts)
else:
self.show_info(maxtries)
self.station.closePort()
def check_transceiver(self, maxtries):
"""See if the transceiver is installed and operational."""
print 'Checking for transceiver...'
ntries = 0
while ntries < maxtries:
ntries += 1
if self.station.transceiver_is_present():
print 'Transceiver is present'
sn = self.station.get_transceiver_serial()
print 'serial: %s' % sn
tid = self.station.get_transceiver_id()
print 'id: %d (0x%04x)' % (tid, tid)
break
print 'Not found (attempt %d of %d) ...' % (ntries, maxtries)
time.sleep(5)
else:
print 'Transceiver not responding.'
def pair(self, maxtries):
"""Pair the transceiver with the station console."""
print 'Pairing transceiver with console...'
maxwait = 90 # how long to wait between button presses, in seconds
ntries = 0
while ntries < maxtries or maxtries == 0:
if self.station.transceiver_is_paired():
print 'Transceiver is paired to console'
break
ntries += 1
msg = 'Press and hold the [v] key until "PC" appears'
if maxtries > 0:
msg += ' (attempt %d of %d)' % (ntries, maxtries)
else:
msg += ' (attempt %d)' % ntries
print msg
now = start_ts = int(time.time())
while (now - start_ts < maxwait and
not self.station.transceiver_is_paired()):
time.sleep(5)
now = int(time.time())
else:
print 'Transceiver not paired to console.'
def get_interval(self, maxtries):
cfg = self.get_config(maxtries)
if cfg is None:
return None
return getHistoryInterval(cfg['history_interval'])
def get_config(self, maxtries):
start_ts = None
ntries = 0
while ntries < maxtries or maxtries == 0:
cfg = self.station.get_config()
if cfg is not None:
return cfg
ntries += 1
if start_ts is None:
start_ts = int(time.time())
else:
dur = int(time.time()) - start_ts
print 'No data after %d seconds (press SET to sync)' % dur
time.sleep(30)
return None
def set_interval(self, maxtries, interval, prompt):
"""Set the station archive interval"""
print "This feature is not yet implemented"
def show_info(self, maxtries):
"""Query the station then display the settings."""
print 'Querying the station for the configuration...'
cfg = self.get_config(maxtries)
if cfg is not None:
print_dict(cfg)
def show_current(self, maxtries):
"""Get current weather observation."""
print 'Querying the station for current weather data...'
start_ts = None
ntries = 0
while ntries < maxtries or maxtries == 0:
packet = self.station.get_observation()
if packet is not None:
print_dict(packet)
break
ntries += 1
if start_ts is None:
start_ts = int(time.time())
else:
dur = int(time.time()) - start_ts
print 'No data after %d seconds (press SET to sync)' % dur
time.sleep(30)
def show_history(self, maxtries, ts=0, count=0):
"""Display the indicated number of records or the records since the
specified timestamp (local time, in seconds)"""
print "Querying the station for historical records..."
ntries = 0
last_n = nrem = None
last_ts = int(time.time())
self.station.start_caching_history(since_ts=ts, num_rec=count)
while nrem is None or nrem > 0:
if ntries >= maxtries:
print 'Giving up after %d tries' % ntries
break
time.sleep(30)
ntries += 1
now = int(time.time())
n = self.station.get_num_history_scanned()
if n == last_n:
dur = now - last_ts
print 'No data after %d seconds (press SET to sync)' % dur
else:
ntries = 0
last_ts = now
last_n = n
nrem = self.station.get_uncached_history_count()
ni = self.station.get_next_history_index()
li = self.station.get_latest_history_index()
msg = " scanned %s records: current=%s latest=%s remaining=%s\r" % (n, ni, li, nrem)
sys.stdout.write(msg)
sys.stdout.flush()
self.station.stop_caching_history()
records = self.station.get_history_cache_records()
self.station.clear_history_cache()
print
print 'Found %d records' % len(records)
for r in records:
print r
class WS28xxDriver(weewx.drivers.AbstractDevice):
"""Driver for LaCrosse WS28xx stations."""
max_records = 1797
def __init__(self, **stn_dict) :
"""Initialize the station object.
model: Which station model is this?
[Optional. Default is 'LaCrosse WS28xx']
transceiver_frequency: Frequency for transceiver-to-console. Specify
either US or EU.
[Required. Default is US]
polling_interval: How often to sample the USB interface for data.
[Optional. Default is 30 seconds]
comm_interval: Communications mode interval
[Optional. Default is 3]
device_id: The USB device ID for the transceiver. If there are
multiple devices with the same vendor and product IDs on the bus,
each will have a unique device identifier. Use this identifier
to indicate which device should be used.
[Optional. Default is None]
serial: The transceiver serial number. If there are multiple
devices with the same vendor and product IDs on the bus, each will
have a unique serial number. Use the serial number to indicate which
transceiver should be used.
[Optional. Default is None]
"""
self.model = stn_dict.get('model', 'LaCrosse WS28xx')
self.polling_interval = int(stn_dict.get('polling_interval', 30))
self.comm_interval = int(stn_dict.get('comm_interval', 3))
self.frequency = stn_dict.get('transceiver_frequency', 'US')
self.device_id = stn_dict.get('device_id', None)
self.serial = stn_dict.get('serial', None)
self.vendor_id = 0x6666
self.product_id = 0x5555
now = int(time.time())
self._service = None
self._last_rain = None
self._last_obs_ts = None
self._last_nodata_log_ts = now
self._nodata_interval = 300 # how often to check for no data
self._last_contact_log_ts = now
self._nocontact_interval = 300 # how often to check for no contact
self._log_interval = 600 # how often to log
global DEBUG_COMM
DEBUG_COMM = int(stn_dict.get('debug_comm', 0))
global DEBUG_CONFIG_DATA
DEBUG_CONFIG_DATA = int(stn_dict.get('debug_config_data', 0))
global DEBUG_WEATHER_DATA
DEBUG_WEATHER_DATA = int(stn_dict.get('debug_weather_data', 0))
global DEBUG_HISTORY_DATA
DEBUG_HISTORY_DATA = int(stn_dict.get('debug_history_data', 0))
global DEBUG_DUMP_FORMAT
DEBUG_DUMP_FORMAT = stn_dict.get('debug_dump_format', 'auto')
loginf('driver version is %s' % DRIVER_VERSION)
loginf('frequency is %s' % self.frequency)
self.startUp()
@property
def hardware_name(self):
return self.model
# this is invoked by StdEngine as it shuts down
def closePort(self):
self.shutDown()
def genLoopPackets(self):
"""Generator function that continuously returns decoded packets."""
while True:
now = int(time.time()+0.5)
packet = self.get_observation()
if packet is not None:
ts = packet['dateTime']
if self._last_obs_ts is None or self._last_obs_ts != ts:
self._last_obs_ts = ts
self._last_nodata_log_ts = now
self._last_contact_log_ts = now
else:
packet = None
# if no new weather data, return an empty packet
if packet is None:
packet = {'usUnits': weewx.METRIC, 'dateTime': now}
# if no new weather data for awhile, log it
if self._last_obs_ts is None or \
now - self._last_obs_ts > self._nodata_interval:
if now - self._last_nodata_log_ts > self._log_interval:
msg = 'no new weather data'
if self._last_obs_ts is not None:
msg += ' after %d seconds' % (
now - self._last_obs_ts)
loginf(msg)
self._last_nodata_log_ts = now
# if no contact with console for awhile, log it
ts = self.get_last_contact()
if ts is None or now - ts > self._nocontact_interval:
if now - self._last_contact_log_ts > self._log_interval:
msg = 'no contact with console'
if ts is not None:
msg += ' after %d seconds' % (now - ts)
msg += ': press [SET] to sync'
loginf(msg)
self._last_contact_log_ts = now
yield packet
time.sleep(self.polling_interval)
def genStartupRecords(self, ts):
loginf('Scanning historical records')
maxtries = 65
ntries = 0
last_n = n = nrem = None
last_ts = now = int(time.time())
self.start_caching_history(since_ts=ts)
while nrem is None or nrem > 0:
if ntries >= maxtries:
logerr('No historical data after %d tries' % ntries)
return
time.sleep(60)
ntries += 1
now = int(time.time())
n = self.get_num_history_scanned()
if n == last_n:
dur = now - last_ts
loginf('No data after %d seconds (press SET to sync)' % dur)
else:
ntries = 0
last_ts = now
last_n = n
nrem = self.get_uncached_history_count()
ni = self.get_next_history_index()
li = self.get_latest_history_index()
loginf("Scanned %s records: current=%s latest=%s remaining=%s" %
(n, ni, li, nrem))
self.stop_caching_history()
records = self.get_history_cache_records()
self.clear_history_cache()
loginf('Found %d historical records' % len(records))
last_ts = None
for r in records:
if last_ts is not None and r['dateTime'] is not None:
r['usUnits'] = weewx.METRIC
r['interval'] = (r['dateTime'] - last_ts) / 60
yield r
last_ts = r['dateTime']
# FIXME: do not implement hardware record generation until we figure
# out how to query the historical records faster.
# def genArchiveRecords(self, since_ts):
# pass
# FIXME: implement retries for this so that rf thread has time to get
# configuration data from the station
# @property
# def archive_interval(self):
# cfg = self.get_config()
# return getHistoryInterval(cfg['history_interval']) * 60
# FIXME: implement set/get time
# def setTime(self):
# pass
# def getTime(self):
# pass
def startUp(self):
if self._service is not None:
return
self._service = CCommunicationService()
self._service.setup(self.frequency,
self.vendor_id, self.product_id, self.device_id,
self.serial, comm_interval=self.comm_interval)
self._service.startRFThread()
def shutDown(self):
self._service.stopRFThread()
self._service.teardown()
self._service = None
def transceiver_is_present(self):
return self._service.DataStore.getTransceiverPresent()
def transceiver_is_paired(self):
return self._service.DataStore.getDeviceRegistered()
def get_transceiver_serial(self):
return self._service.DataStore.getTransceiverSerNo()
def get_transceiver_id(self):
return self._service.DataStore.getDeviceID()
def get_last_contact(self):
return self._service.getLastStat().last_seen_ts
def get_observation(self):
data = self._service.getWeatherData()
ts = data._timestamp
if ts is None:
return None
# add elements required for weewx LOOP packets
packet = {}
packet['usUnits'] = weewx.METRIC
packet['dateTime'] = ts
# data from the station sensors
packet['inTemp'] = get_datum_diff(data._TempIndoor,
CWeatherTraits.TemperatureNP(),
CWeatherTraits.TemperatureOFL())
packet['inHumidity'] = get_datum_diff(data._HumidityIndoor,
CWeatherTraits.HumidityNP(),
CWeatherTraits.HumidityOFL())
packet['outTemp'] = get_datum_diff(data._TempOutdoor,
CWeatherTraits.TemperatureNP(),
CWeatherTraits.TemperatureOFL())
packet['outHumidity'] = get_datum_diff(data._HumidityOutdoor,
CWeatherTraits.HumidityNP(),
CWeatherTraits.HumidityOFL())
packet['pressure'] = get_datum_diff(data._PressureRelative_hPa,
CWeatherTraits.PressureNP(),
CWeatherTraits.PressureOFL())
packet['windSpeed'] = get_datum_diff(data._WindSpeed,
CWeatherTraits.WindNP(),
CWeatherTraits.WindOFL())
packet['windGust'] = get_datum_diff(data._Gust,
CWeatherTraits.WindNP(),
CWeatherTraits.WindOFL())
packet['windDir'] = getWindDir(data._WindDirection,
packet['windSpeed'])
packet['windGustDir'] = getWindDir(data._GustDirection,
packet['windGust'])
# calculated elements not directly reported by station
packet['rainRate'] = get_datum_match(data._Rain1H,
CWeatherTraits.RainNP(),
CWeatherTraits.RainOFL())
if packet['rainRate'] is not None:
packet['rainRate'] /= 10 # weewx wants cm/hr
rain_total = get_datum_match(data._RainTotal,
CWeatherTraits.RainNP(),
CWeatherTraits.RainOFL())
delta = weewx.wxformulas.calculate_rain(rain_total, self._last_rain)
self._last_rain = rain_total
packet['rain'] = delta
if packet['rain'] is not None:
packet['rain'] /= 10 # weewx wants cm
# track the signal strength and battery levels
laststat = self._service.getLastStat()
packet['rxCheckPercent'] = laststat.LastLinkQuality
packet['windBatteryStatus'] = getBatteryStatus(
laststat.LastBatteryStatus, 'wind')
packet['rainBatteryStatus'] = getBatteryStatus(
laststat.LastBatteryStatus, 'rain')
packet['outTempBatteryStatus'] = getBatteryStatus(
laststat.LastBatteryStatus, 'th')
packet['inTempBatteryStatus'] = getBatteryStatus(
laststat.LastBatteryStatus, 'console')
return packet
def get_config(self):
logdbg('get station configuration')
cfg = self._service.getConfigData().asDict()
cs = cfg.get('checksum_out')
if cs is None or cs == 0:
return None
return cfg
def start_caching_history(self, since_ts=0, num_rec=0):
self._service.startCachingHistory(since_ts, num_rec)
def stop_caching_history(self):
self._service.stopCachingHistory()
def get_uncached_history_count(self):
return self._service.getUncachedHistoryCount()
def get_next_history_index(self):
return self._service.getNextHistoryIndex()
def get_latest_history_index(self):
return self._service.getLatestHistoryIndex()
def get_num_history_scanned(self):
return self._service.getNumHistoryScanned()
def get_history_cache_records(self):
return self._service.getHistoryCacheRecords()
def clear_history_cache(self):
self._service.clearHistoryCache()
def set_interval(self, interval):
# FIXME: set the archive interval
pass
# The following classes and methods are adapted from the implementation by
# eddie de pieri, which is in turn based on the HeavyWeather implementation.
class BadResponse(Exception):
"""raised when unexpected data found in frame buffer"""
pass
class DataWritten(Exception):
"""raised when message 'data written' in frame buffer"""
pass
class BitHandling:
# return a nonzero result, 2**offset, if the bit at 'offset' is one.
@staticmethod
def testBit(int_type, offset):
mask = 1 << offset
return int_type & mask
# return an integer with the bit at 'offset' set to 1.
@staticmethod
def setBit(int_type, offset):
mask = 1 << offset
return int_type | mask
    # return an integer with the bit at 'offset' set to 'val'.
@staticmethod
def setBitVal(int_type, offset, val):
mask = val << offset
return int_type | mask
# return an integer with the bit at 'offset' cleared.
@staticmethod
def clearBit(int_type, offset):
mask = ~(1 << offset)
return int_type & mask
# return an integer with the bit at 'offset' inverted, 0->1 and 1->0.
@staticmethod
def toggleBit(int_type, offset):
mask = 1 << offset
return int_type ^ mask
class EHistoryInterval:
hi01Min = 0
hi05Min = 1
hi10Min = 2
hi15Min = 3
hi20Min = 4
hi30Min = 5
hi60Min = 6
hi02Std = 7
hi04Std = 8
hi06Std = 9
hi08Std = 0xA
hi12Std = 0xB
hi24Std = 0xC
class EWindspeedFormat:
wfMs = 0
wfKnots = 1
wfBFT = 2
wfKmh = 3
wfMph = 4
class ERainFormat:
rfMm = 0
rfInch = 1
class EPressureFormat:
pfinHg = 0
pfHPa = 1
class ETemperatureFormat:
tfFahrenheit = 0
tfCelsius = 1
class EClockMode:
ct24H = 0
ctAmPm = 1
class EWeatherTendency:
TREND_NEUTRAL = 0
TREND_UP = 1
TREND_DOWN = 2
TREND_ERR = 3
class EWeatherState:
WEATHER_BAD = 0
WEATHER_NEUTRAL = 1
WEATHER_GOOD = 2
WEATHER_ERR = 3
class EWindDirection:
wdN = 0
wdNNE = 1
wdNE = 2
wdENE = 3
wdE = 4
wdESE = 5
wdSE = 6
wdSSE = 7
wdS = 8
wdSSW = 9
wdSW = 0x0A
wdWSW = 0x0B
wdW = 0x0C
wdWNW = 0x0D
wdNW = 0x0E
wdNNW = 0x0F
wdERR = 0x10
wdInvalid = 0x11
wdNone = 0x12
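# convert a 0-15 wind direction index to degrees (22.5 degree steps);
# None when there is no wind or the index is out of range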
def getWindDir(wdir, wspeed):
if wspeed is None or wspeed == 0:
return None
if wdir < 0 or wdir >= 16:
return None
return wdir * 360 / 16
class EResetMinMaxFlags:
rmTempIndoorHi = 0
rmTempIndoorLo = 1
rmTempOutdoorHi = 2
rmTempOutdoorLo = 3
rmWindchillHi = 4
rmWindchillLo = 5
rmDewpointHi = 6
rmDewpointLo = 7
rmHumidityIndoorLo = 8
rmHumidityIndoorHi = 9
rmHumidityOutdoorLo = 0x0A
rmHumidityOutdoorHi = 0x0B
rmWindspeedHi = 0x0C
rmWindspeedLo = 0x0D
rmGustHi = 0x0E
rmGustLo = 0x0F
rmPressureLo = 0x10
rmPressureHi = 0x11
rmRain1hHi = 0x12
rmRain24hHi = 0x13
rmRainLastWeekHi = 0x14
rmRainLastMonthHi = 0x15
rmRainTotal = 0x16
rmInvalid = 0x17
class ERequestType:
rtGetCurrent = 0
rtGetHistory = 1
rtGetConfig = 2
rtSetConfig = 3
rtSetTime = 4
rtFirstConfig = 5
rtINVALID = 6
class EAction:
aGetHistory = 0
aReqSetTime = 1
aReqSetConfig = 2
aGetConfig = 3
aGetCurrent = 5
aSendTime = 0xc0
aSendConfig = 0x40
class ERequestState:
rsQueued = 0
rsRunning = 1
rsFinished = 2
rsPreamble = 3
rsWaitDevice = 4
rsWaitConfig = 5
rsError = 6
rsChanged = 7
rsINVALID = 8
class EResponseType:
rtDataWritten = 0x20
rtGetConfig = 0x40
rtGetCurrentWeather = 0x60
rtGetHistory = 0x80
rtRequest = 0xa0
rtReqFirstConfig = 0xa1
rtReqSetConfig = 0xa2
rtReqSetTime = 0xa3
# frequency standards and their associated transmission frequencies
class EFrequency:
fsUS = 'US'
tfUS = 905000000
fsEU = 'EU'
tfEU = 868300000
def getFrequency(standard):
if standard == EFrequency.fsUS:
return EFrequency.tfUS
elif standard == EFrequency.fsEU:
return EFrequency.tfEU
logerr("unknown frequency standard '%s', using US" % standard)
return EFrequency.tfUS
def getFrequencyStandard(frequency):
if frequency == EFrequency.tfUS:
return EFrequency.fsUS
elif frequency == EFrequency.tfEU:
return EFrequency.fsEU
logerr("unknown frequency '%s', using US" % frequency)
return EFrequency.fsUS
# HWPro presents battery flags as WS/TH/RAIN/WIND
# 0 - wind
# 1 - rain
# 2 - thermo-hygro
# 3 - console
batterybits = {'wind':0, 'rain':1, 'th':2, 'console':3}
def getBatteryStatus(status, flag):
"""Return 1 if bit is set, 0 otherwise"""
bit = batterybits.get(flag)
if bit is None:
return None
if BitHandling.testBit(status, bit):
return 1
return 0
history_intervals = {
EHistoryInterval.hi01Min: 1,
EHistoryInterval.hi05Min: 5,
EHistoryInterval.hi10Min: 10,
    EHistoryInterval.hi15Min: 15,
    EHistoryInterval.hi20Min: 20,
EHistoryInterval.hi30Min: 30,
EHistoryInterval.hi60Min: 60,
EHistoryInterval.hi02Std: 120,
EHistoryInterval.hi04Std: 240,
EHistoryInterval.hi06Std: 360,
EHistoryInterval.hi08Std: 480,
EHistoryInterval.hi12Std: 720,
EHistoryInterval.hi24Std: 1440,
}
def getHistoryInterval(i):
return history_intervals.get(i)
# NP - not present
# OFL - outside factory limits
class CWeatherTraits(object):
windDirMap = {
0: "N", 1: "NNE", 2: "NE", 3: "ENE", 4: "E", 5: "ESE", 6: "SE",
7: "SSE", 8: "S", 9: "SSW", 10: "SW", 11: "WSW", 12: "W",
13: "WNW", 14: "NW", 15: "NWN", 16: "err", 17: "inv", 18: "None" }
forecastMap = {
0: "Rainy(Bad)", 1: "Cloudy(Neutral)", 2: "Sunny(Good)", 3: "Error" }
trendMap = {
0: "Stable(Neutral)", 1: "Rising(Up)", 2: "Falling(Down)", 3: "Error" }
@staticmethod
def TemperatureNP():
return 81.099998
@staticmethod
def TemperatureOFL():
return 136.0
@staticmethod
def PressureNP():
return 10101010.0
@staticmethod
def PressureOFL():
return 16666.5
@staticmethod
def HumidityNP():
return 110.0
@staticmethod
def HumidityOFL():
return 121.0
@staticmethod
def RainNP():
return -0.2
@staticmethod
def RainOFL():
return 16666.664
@staticmethod
def WindNP():
return 183.6 # km/h = 51.0 m/s
@staticmethod
def WindOFL():
return 183.96 # km/h = 51.099998 m/s
@staticmethod
def TemperatureOffset():
return 40.0
class CMeasurement:
_Value = 0.0
_ResetFlag = 23
_IsError = 1
_IsOverflow = 1
_Time = None
def Reset(self):
self._Value = 0.0
self._ResetFlag = 23
self._IsError = 1
self._IsOverflow = 1
class CMinMaxMeasurement(object):
def __init__(self):
self._Min = CMeasurement()
self._Max = CMeasurement()
# firmware XXX has bogus date values for these fields
_bad_labels = ['RainLastMonthMax','RainLastWeekMax','PressureRelativeMin']
class USBHardware(object):
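    # in the nibble-coded fields a nibble value of 0xF marks overflow (OFL)
    # and a value of 0xA-0xE marks an error/not-present reading (NP); the
    # isOFLn/isErrn helpers test n consecutive nibbles for these markers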
@staticmethod
def isOFL2(buf, start, StartOnHiNibble):
if StartOnHiNibble:
result = (buf[0][start+0] >> 4) == 15 \
or (buf[0][start+0] & 0xF) == 15
else:
result = (buf[0][start+0] & 0xF) == 15 \
or (buf[0][start+1] >> 4) == 15
return result
@staticmethod
def isOFL3(buf, start, StartOnHiNibble):
if StartOnHiNibble:
result = (buf[0][start+0] >> 4) == 15 \
or (buf[0][start+0] & 0xF) == 15 \
or (buf[0][start+1] >> 4) == 15
else:
result = (buf[0][start+0] & 0xF) == 15 \
or (buf[0][start+1] >> 4) == 15 \
or (buf[0][start+1] & 0xF) == 15
return result
@staticmethod
def isOFL5(buf, start, StartOnHiNibble):
if StartOnHiNibble:
result = (buf[0][start+0] >> 4) == 15 \
or (buf[0][start+0] & 0xF) == 15 \
or (buf[0][start+1] >> 4) == 15 \
or (buf[0][start+1] & 0xF) == 15 \
or (buf[0][start+2] >> 4) == 15
else:
result = (buf[0][start+0] & 0xF) == 15 \
or (buf[0][start+1] >> 4) == 15 \
or (buf[0][start+1] & 0xF) == 15 \
or (buf[0][start+2] >> 4) == 15 \
or (buf[0][start+2] & 0xF) == 15
return result
@staticmethod
def isErr2(buf, start, StartOnHiNibble):
if StartOnHiNibble:
result = (buf[0][start+0] >> 4) >= 10 \
and (buf[0][start+0] >> 4) != 15 \
or (buf[0][start+0] & 0xF) >= 10 \
and (buf[0][start+0] & 0xF) != 15
else:
result = (buf[0][start+0] & 0xF) >= 10 \
and (buf[0][start+0] & 0xF) != 15 \
or (buf[0][start+1] >> 4) >= 10 \
and (buf[0][start+1] >> 4) != 15
return result
@staticmethod
def isErr3(buf, start, StartOnHiNibble):
if StartOnHiNibble:
result = (buf[0][start+0] >> 4) >= 10 \
and (buf[0][start+0] >> 4) != 15 \
or (buf[0][start+0] & 0xF) >= 10 \
and (buf[0][start+0] & 0xF) != 15 \
or (buf[0][start+1] >> 4) >= 10 \
and (buf[0][start+1] >> 4) != 15
else:
result = (buf[0][start+0] & 0xF) >= 10 \
and (buf[0][start+0] & 0xF) != 15 \
or (buf[0][start+1] >> 4) >= 10 \
and (buf[0][start+1] >> 4) != 15 \
or (buf[0][start+1] & 0xF) >= 10 \
and (buf[0][start+1] & 0xF) != 15
return result
@staticmethod
def isErr5(buf, start, StartOnHiNibble):
if StartOnHiNibble:
result = (buf[0][start+0] >> 4) >= 10 \
and (buf[0][start+0] >> 4) != 15 \
or (buf[0][start+0] & 0xF) >= 10 \
and (buf[0][start+0] & 0xF) != 15 \
or (buf[0][start+1] >> 4) >= 10 \
and (buf[0][start+1] >> 4) != 15 \
or (buf[0][start+1] & 0xF) >= 10 \
and (buf[0][start+1] & 0xF) != 15 \
or (buf[0][start+2] >> 4) >= 10 \
and (buf[0][start+2] >> 4) != 15
else:
result = (buf[0][start+0] & 0xF) >= 10 \
and (buf[0][start+0] & 0xF) != 15 \
or (buf[0][start+1] >> 4) >= 10 \
and (buf[0][start+1] >> 4) != 15 \
or (buf[0][start+1] & 0xF) >= 10 \
and (buf[0][start+1] & 0xF) != 15 \
or (buf[0][start+2] >> 4) >= 10 \
and (buf[0][start+2] >> 4) != 15 \
or (buf[0][start+2] & 0xF) >= 10 \
and (buf[0][start+2] & 0xF) != 15
return result
@staticmethod
def reverseByteOrder(buf, start, Count):
nbuf=buf[0]
for i in xrange(0, Count >> 1):
tmp = nbuf[start + i]
nbuf[start + i] = nbuf[start + Count - i - 1]
nbuf[start + Count - i - 1 ] = tmp
buf[0]=nbuf
@staticmethod
def readWindDirectionShared(buf, start):
return (buf[0][0+start] & 0xF, buf[0][start] >> 4)
@staticmethod
def toInt_2(buf, start, StartOnHiNibble):
"""read 2 nibbles"""
if StartOnHiNibble:
rawpre = (buf[0][start+0] >> 4)* 10 \
+ (buf[0][start+0] & 0xF)* 1
else:
rawpre = (buf[0][start+0] & 0xF)* 10 \
+ (buf[0][start+1] >> 4)* 1
return rawpre
@staticmethod
def toRain_7_3(buf, start, StartOnHiNibble):
"""read 7 nibbles, presentation with 3 decimals; units of mm"""
if (USBHardware.isErr2(buf, start+0, StartOnHiNibble) or
USBHardware.isErr5(buf, start+1, StartOnHiNibble)):
result = CWeatherTraits.RainNP()
elif (USBHardware.isOFL2(buf, start+0, StartOnHiNibble) or
USBHardware.isOFL5(buf, start+1, StartOnHiNibble)):
result = CWeatherTraits.RainOFL()
elif StartOnHiNibble:
result = (buf[0][start+0] >> 4)* 1000 \
+ (buf[0][start+0] & 0xF)* 100 \
+ (buf[0][start+1] >> 4)* 10 \
+ (buf[0][start+1] & 0xF)* 1 \
+ (buf[0][start+2] >> 4)* 0.1 \
+ (buf[0][start+2] & 0xF)* 0.01 \
+ (buf[0][start+3] >> 4)* 0.001
else:
result = (buf[0][start+0] & 0xF)* 1000 \
+ (buf[0][start+1] >> 4)* 100 \
+ (buf[0][start+1] & 0xF)* 10 \
+ (buf[0][start+2] >> 4)* 1 \
+ (buf[0][start+2] & 0xF)* 0.1 \
+ (buf[0][start+3] >> 4)* 0.01 \
+ (buf[0][start+3] & 0xF)* 0.001
return result
@staticmethod
def toRain_6_2(buf, start, StartOnHiNibble):
'''read 6 nibbles, presentation with 2 decimals; units of mm'''
if (USBHardware.isErr2(buf, start+0, StartOnHiNibble) or
USBHardware.isErr2(buf, start+1, StartOnHiNibble) or
USBHardware.isErr2(buf, start+2, StartOnHiNibble) ):
result = CWeatherTraits.RainNP()
elif (USBHardware.isOFL2(buf, start+0, StartOnHiNibble) or
USBHardware.isOFL2(buf, start+1, StartOnHiNibble) or
USBHardware.isOFL2(buf, start+2, StartOnHiNibble)):
result = CWeatherTraits.RainOFL()
elif StartOnHiNibble:
result = (buf[0][start+0] >> 4)* 1000 \
+ (buf[0][start+0] & 0xF)* 100 \
+ (buf[0][start+1] >> 4)* 10 \
+ (buf[0][start+1] & 0xF)* 1 \
+ (buf[0][start+2] >> 4)* 0.1 \
+ (buf[0][start+2] & 0xF)* 0.01
else:
result = (buf[0][start+0] & 0xF)* 1000 \
+ (buf[0][start+1] >> 4)* 100 \
+ (buf[0][start+1] & 0xF)* 10 \
+ (buf[0][start+2] >> 4)* 1 \
+ (buf[0][start+2] & 0xF)* 0.1 \
+ (buf[0][start+3] >> 4)* 0.01
return result
@staticmethod
def toRain_3_1(buf, start, StartOnHiNibble):
"""read 3 nibbles, presentation with 1 decimal; units of 0.1 inch"""
if StartOnHiNibble:
hibyte = buf[0][start+0]
lobyte = (buf[0][start+1] >> 4) & 0xF
else:
hibyte = 16*(buf[0][start+0] & 0xF) + ((buf[0][start+1] >> 4) & 0xF)
lobyte = buf[0][start+1] & 0xF
if hibyte == 0xFF and lobyte == 0xE :
result = CWeatherTraits.RainNP()
elif hibyte == 0xFF and lobyte == 0xF :
result = CWeatherTraits.RainOFL()
else:
val = USBHardware.toFloat_3_1(buf, start, StartOnHiNibble) # 0.1 inch
result = val * 2.54 # mm
return result
@staticmethod
def toFloat_3_1(buf, start, StartOnHiNibble):
"""read 3 nibbles, presentation with 1 decimal"""
if StartOnHiNibble:
result = (buf[0][start+0] >> 4)*16**2 \
+ (buf[0][start+0] & 0xF)* 16**1 \
+ (buf[0][start+1] >> 4)* 16**0
else:
result = (buf[0][start+0] & 0xF)*16**2 \
+ (buf[0][start+1] >> 4)* 16**1 \
+ (buf[0][start+1] & 0xF)* 16**0
result = result / 10.0
return result
@staticmethod
def toDateTime(buf, start, StartOnHiNibble, label):
"""read 10 nibbles, presentation as DateTime"""
result = None
if (USBHardware.isErr2(buf, start+0, StartOnHiNibble)
or USBHardware.isErr2(buf, start+1, StartOnHiNibble)
or USBHardware.isErr2(buf, start+2, StartOnHiNibble)
or USBHardware.isErr2(buf, start+3, StartOnHiNibble)
or USBHardware.isErr2(buf, start+4, StartOnHiNibble)):
logerr('ToDateTime: bogus date for %s: error status in buffer' %
label)
else:
year = USBHardware.toInt_2(buf, start+0, StartOnHiNibble) + 2000
month = USBHardware.toInt_2(buf, start+1, StartOnHiNibble)
days = USBHardware.toInt_2(buf, start+2, StartOnHiNibble)
hours = USBHardware.toInt_2(buf, start+3, StartOnHiNibble)
minutes = USBHardware.toInt_2(buf, start+4, StartOnHiNibble)
try:
result = datetime(year, month, days, hours, minutes)
except ValueError:
if label not in _bad_labels:
logerr(('ToDateTime: bogus date for %s:'
' bad date conversion from'
' %s %s %s %s %s') %
(label, minutes, hours, days, month, year))
if result is None:
# FIXME: use None instead of a really old date to indicate invalid
result = datetime(1900, 01, 01, 00, 00)
return result
@staticmethod
def toHumidity_2_0(buf, start, StartOnHiNibble):
"""read 2 nibbles, presentation with 0 decimal"""
if USBHardware.isErr2(buf, start+0, StartOnHiNibble):
result = CWeatherTraits.HumidityNP()
elif USBHardware.isOFL2(buf, start+0, StartOnHiNibble):
result = CWeatherTraits.HumidityOFL()
else:
result = USBHardware.toInt_2(buf, start, StartOnHiNibble)
return result
@staticmethod
def toTemperature_5_3(buf, start, StartOnHiNibble):
"""read 5 nibbles, presentation with 3 decimals; units of degree C"""
if USBHardware.isErr5(buf, start+0, StartOnHiNibble):
result = CWeatherTraits.TemperatureNP()
elif USBHardware.isOFL5(buf, start+0, StartOnHiNibble):
result = CWeatherTraits.TemperatureOFL()
else:
if StartOnHiNibble:
rawtemp = (buf[0][start+0] >> 4)* 10 \
+ (buf[0][start+0] & 0xF)* 1 \
+ (buf[0][start+1] >> 4)* 0.1 \
+ (buf[0][start+1] & 0xF)* 0.01 \
+ (buf[0][start+2] >> 4)* 0.001
else:
rawtemp = (buf[0][start+0] & 0xF)* 10 \
+ (buf[0][start+1] >> 4)* 1 \
+ (buf[0][start+1] & 0xF)* 0.1 \
+ (buf[0][start+2] >> 4)* 0.01 \
+ (buf[0][start+2] & 0xF)* 0.001
result = rawtemp - CWeatherTraits.TemperatureOffset()
return result
@staticmethod
def toTemperature_3_1(buf, start, StartOnHiNibble):
"""read 3 nibbles, presentation with 1 decimal; units of degree C"""
if USBHardware.isErr3(buf, start+0, StartOnHiNibble):
result = CWeatherTraits.TemperatureNP()
elif USBHardware.isOFL3(buf, start+0, StartOnHiNibble):
result = CWeatherTraits.TemperatureOFL()
else:
if StartOnHiNibble :
rawtemp = (buf[0][start+0] >> 4)* 10 \
+ (buf[0][start+0] & 0xF)* 1 \
+ (buf[0][start+1] >> 4)* 0.1
else:
rawtemp = (buf[0][start+0] & 0xF)* 10 \
+ (buf[0][start+1] >> 4)* 1 \
+ (buf[0][start+1] & 0xF)* 0.1
result = rawtemp - CWeatherTraits.TemperatureOffset()
return result
@staticmethod
def toWindspeed_6_2(buf, start):
"""read 6 nibbles, presentation with 2 decimals; units of km/h"""
result = (buf[0][start+0] >> 4)* 16**5 \
+ (buf[0][start+0] & 0xF)* 16**4 \
+ (buf[0][start+1] >> 4)* 16**3 \
+ (buf[0][start+1] & 0xF)* 16**2 \
+ (buf[0][start+2] >> 4)* 16**1 \
+ (buf[0][start+2] & 0xF)
result /= 256.0
result /= 100.0 # km/h
return result
@staticmethod
def toWindspeed_3_1(buf, start, StartOnHiNibble):
"""read 3 nibbles, presentation with 1 decimal; units of m/s"""
if StartOnHiNibble :
hibyte = buf[0][start+0]
lobyte = (buf[0][start+1] >> 4) & 0xF
else:
hibyte = 16*(buf[0][start+0] & 0xF) + ((buf[0][start+1] >> 4) & 0xF)
lobyte = buf[0][start+1] & 0xF
if hibyte == 0xFF and lobyte == 0xE:
result = CWeatherTraits.WindNP()
elif hibyte == 0xFF and lobyte == 0xF:
result = CWeatherTraits.WindOFL()
else:
result = USBHardware.toFloat_3_1(buf, start, StartOnHiNibble) # m/s
result *= 3.6 # km/h
return result
@staticmethod
def readPressureShared(buf, start, StartOnHiNibble):
return (USBHardware.toPressure_hPa_5_1(buf,start+2,1-StartOnHiNibble),
USBHardware.toPressure_inHg_5_2(buf,start,StartOnHiNibble))
@staticmethod
def toPressure_hPa_5_1(buf, start, StartOnHiNibble):
"""read 5 nibbles, presentation with 1 decimal; units of hPa (mbar)"""
if USBHardware.isErr5(buf, start+0, StartOnHiNibble):
result = CWeatherTraits.PressureNP()
elif USBHardware.isOFL5(buf, start+0, StartOnHiNibble):
result = CWeatherTraits.PressureOFL()
elif StartOnHiNibble :
result = (buf[0][start+0] >> 4)* 1000 \
+ (buf[0][start+0] & 0xF)* 100 \
+ (buf[0][start+1] >> 4)* 10 \
+ (buf[0][start+1] & 0xF)* 1 \
+ (buf[0][start+2] >> 4)* 0.1
else:
result = (buf[0][start+0] & 0xF)* 1000 \
+ (buf[0][start+1] >> 4)* 100 \
+ (buf[0][start+1] & 0xF)* 10 \
+ (buf[0][start+2] >> 4)* 1 \
+ (buf[0][start+2] & 0xF)* 0.1
return result
@staticmethod
def toPressure_inHg_5_2(buf, start, StartOnHiNibble):
"""read 5 nibbles, presentation with 2 decimals; units of inHg"""
if USBHardware.isErr5(buf, start+0, StartOnHiNibble):
result = CWeatherTraits.PressureNP()
elif USBHardware.isOFL5(buf, start+0, StartOnHiNibble):
result = CWeatherTraits.PressureOFL()
elif StartOnHiNibble :
result = (buf[0][start+0] >> 4)* 100 \
+ (buf[0][start+0] & 0xF)* 10 \
+ (buf[0][start+1] >> 4)* 1 \
+ (buf[0][start+1] & 0xF)* 0.1 \
+ (buf[0][start+2] >> 4)* 0.01
else:
result = (buf[0][start+0] & 0xF)* 100 \
+ (buf[0][start+1] >> 4)* 10 \
+ (buf[0][start+1] & 0xF)* 1 \
+ (buf[0][start+2] >> 4)* 0.1 \
+ (buf[0][start+2] & 0xF)* 0.01
return result
class CCurrentWeatherData(object):
def __init__(self):
self._timestamp = None
self._checksum = None
self._PressureRelative_hPa = CWeatherTraits.PressureNP()
self._PressureRelative_hPaMinMax = CMinMaxMeasurement()
self._PressureRelative_inHg = CWeatherTraits.PressureNP()
self._PressureRelative_inHgMinMax = CMinMaxMeasurement()
self._WindSpeed = CWeatherTraits.WindNP()
self._WindDirection = EWindDirection.wdNone
self._WindDirection1 = EWindDirection.wdNone
self._WindDirection2 = EWindDirection.wdNone
self._WindDirection3 = EWindDirection.wdNone
self._WindDirection4 = EWindDirection.wdNone
self._WindDirection5 = EWindDirection.wdNone
self._Gust = CWeatherTraits.WindNP()
self._GustMax = CMinMaxMeasurement()
self._GustDirection = EWindDirection.wdNone
self._GustDirection1 = EWindDirection.wdNone
self._GustDirection2 = EWindDirection.wdNone
self._GustDirection3 = EWindDirection.wdNone
self._GustDirection4 = EWindDirection.wdNone
self._GustDirection5 = EWindDirection.wdNone
self._Rain1H = CWeatherTraits.RainNP()
self._Rain1HMax = CMinMaxMeasurement()
self._Rain24H = CWeatherTraits.RainNP()
self._Rain24HMax = CMinMaxMeasurement()
self._RainLastWeek = CWeatherTraits.RainNP()
self._RainLastWeekMax = CMinMaxMeasurement()
self._RainLastMonth = CWeatherTraits.RainNP()
self._RainLastMonthMax = CMinMaxMeasurement()
self._RainTotal = CWeatherTraits.RainNP()
self._LastRainReset = None
self._TempIndoor = CWeatherTraits.TemperatureNP()
self._TempIndoorMinMax = CMinMaxMeasurement()
self._TempOutdoor = CWeatherTraits.TemperatureNP()
self._TempOutdoorMinMax = CMinMaxMeasurement()
self._HumidityIndoor = CWeatherTraits.HumidityNP()
self._HumidityIndoorMinMax = CMinMaxMeasurement()
self._HumidityOutdoor = CWeatherTraits.HumidityNP()
self._HumidityOutdoorMinMax = CMinMaxMeasurement()
self._Dewpoint = CWeatherTraits.TemperatureNP()
self._DewpointMinMax = CMinMaxMeasurement()
self._Windchill = CWeatherTraits.TemperatureNP()
self._WindchillMinMax = CMinMaxMeasurement()
self._WeatherState = EWeatherState.WEATHER_ERR
self._WeatherTendency = EWeatherTendency.TREND_ERR
self._AlarmRingingFlags = 0
self._AlarmMarkedFlags = 0
self._PresRel_hPa_Max = 0.0
self._PresRel_inHg_Max = 0.0
@staticmethod
def calcChecksum(buf):
return calc_checksum(buf, 6)
def checksum(self):
return self._checksum
def read(self, buf):
self._timestamp = int(time.time() + 0.5)
self._checksum = CCurrentWeatherData.calcChecksum(buf)
nbuf = [0]
nbuf[0] = buf[0]
self._StartBytes = nbuf[0][6]*0xF + nbuf[0][7] # FIXME: what is this?
self._WeatherTendency = (nbuf[0][8] >> 4) & 0xF
if self._WeatherTendency > 3:
self._WeatherTendency = 3
self._WeatherState = nbuf[0][8] & 0xF
if self._WeatherState > 3:
self._WeatherState = 3
self._TempIndoorMinMax._Max._Value = USBHardware.toTemperature_5_3(nbuf, 19, 0)
self._TempIndoorMinMax._Min._Value = USBHardware.toTemperature_5_3(nbuf, 22, 1)
self._TempIndoor = USBHardware.toTemperature_5_3(nbuf, 24, 0)
self._TempIndoorMinMax._Min._IsError = (self._TempIndoorMinMax._Min._Value == CWeatherTraits.TemperatureNP())
self._TempIndoorMinMax._Min._IsOverflow = (self._TempIndoorMinMax._Min._Value == CWeatherTraits.TemperatureOFL())
self._TempIndoorMinMax._Max._IsError = (self._TempIndoorMinMax._Max._Value == CWeatherTraits.TemperatureNP())
self._TempIndoorMinMax._Max._IsOverflow = (self._TempIndoorMinMax._Max._Value == CWeatherTraits.TemperatureOFL())
self._TempIndoorMinMax._Max._Time = None if self._TempIndoorMinMax._Max._IsError or self._TempIndoorMinMax._Max._IsOverflow else USBHardware.toDateTime(nbuf, 9, 0, 'TempIndoorMax')
self._TempIndoorMinMax._Min._Time = None if self._TempIndoorMinMax._Min._IsError or self._TempIndoorMinMax._Min._IsOverflow else USBHardware.toDateTime(nbuf, 14, 0, 'TempIndoorMin')
self._TempOutdoorMinMax._Max._Value = USBHardware.toTemperature_5_3(nbuf, 37, 0)
self._TempOutdoorMinMax._Min._Value = USBHardware.toTemperature_5_3(nbuf, 40, 1)
self._TempOutdoor = USBHardware.toTemperature_5_3(nbuf, 42, 0)
self._TempOutdoorMinMax._Min._IsError = (self._TempOutdoorMinMax._Min._Value == CWeatherTraits.TemperatureNP())
self._TempOutdoorMinMax._Min._IsOverflow = (self._TempOutdoorMinMax._Min._Value == CWeatherTraits.TemperatureOFL())
self._TempOutdoorMinMax._Max._IsError = (self._TempOutdoorMinMax._Max._Value == CWeatherTraits.TemperatureNP())
self._TempOutdoorMinMax._Max._IsOverflow = (self._TempOutdoorMinMax._Max._Value == CWeatherTraits.TemperatureOFL())
self._TempOutdoorMinMax._Max._Time = None if self._TempOutdoorMinMax._Max._IsError or self._TempOutdoorMinMax._Max._IsOverflow else USBHardware.toDateTime(nbuf, 27, 0, 'TempOutdoorMax')
self._TempOutdoorMinMax._Min._Time = None if self._TempOutdoorMinMax._Min._IsError or self._TempOutdoorMinMax._Min._IsOverflow else USBHardware.toDateTime(nbuf, 32, 0, 'TempOutdoorMin')
self._WindchillMinMax._Max._Value = USBHardware.toTemperature_5_3(nbuf, 55, 0)
self._WindchillMinMax._Min._Value = USBHardware.toTemperature_5_3(nbuf, 58, 1)
self._Windchill = USBHardware.toTemperature_5_3(nbuf, 60, 0)
self._WindchillMinMax._Min._IsError = (self._WindchillMinMax._Min._Value == CWeatherTraits.TemperatureNP())
self._WindchillMinMax._Min._IsOverflow = (self._WindchillMinMax._Min._Value == CWeatherTraits.TemperatureOFL())
self._WindchillMinMax._Max._IsError = (self._WindchillMinMax._Max._Value == CWeatherTraits.TemperatureNP())
self._WindchillMinMax._Max._IsOverflow = (self._WindchillMinMax._Max._Value == CWeatherTraits.TemperatureOFL())
self._WindchillMinMax._Max._Time = None if self._WindchillMinMax._Max._IsError or self._WindchillMinMax._Max._IsOverflow else USBHardware.toDateTime(nbuf, 45, 0, 'WindchillMax')
self._WindchillMinMax._Min._Time = None if self._WindchillMinMax._Min._IsError or self._WindchillMinMax._Min._IsOverflow else USBHardware.toDateTime(nbuf, 50, 0, 'WindchillMin')
self._DewpointMinMax._Max._Value = USBHardware.toTemperature_5_3(nbuf, 73, 0)
self._DewpointMinMax._Min._Value = USBHardware.toTemperature_5_3(nbuf, 76, 1)
self._Dewpoint = USBHardware.toTemperature_5_3(nbuf, 78, 0)
self._DewpointMinMax._Min._IsError = (self._DewpointMinMax._Min._Value == CWeatherTraits.TemperatureNP())
self._DewpointMinMax._Min._IsOverflow = (self._DewpointMinMax._Min._Value == CWeatherTraits.TemperatureOFL())
self._DewpointMinMax._Max._IsError = (self._DewpointMinMax._Max._Value == CWeatherTraits.TemperatureNP())
self._DewpointMinMax._Max._IsOverflow = (self._DewpointMinMax._Max._Value == CWeatherTraits.TemperatureOFL())
self._DewpointMinMax._Min._Time = None if self._DewpointMinMax._Min._IsError or self._DewpointMinMax._Min._IsOverflow else USBHardware.toDateTime(nbuf, 68, 0, 'DewpointMin')
self._DewpointMinMax._Max._Time = None if self._DewpointMinMax._Max._IsError or self._DewpointMinMax._Max._IsOverflow else USBHardware.toDateTime(nbuf, 63, 0, 'DewpointMax')
self._HumidityIndoorMinMax._Max._Value = USBHardware.toHumidity_2_0(nbuf, 91, 1)
self._HumidityIndoorMinMax._Min._Value = USBHardware.toHumidity_2_0(nbuf, 92, 1)
self._HumidityIndoor = USBHardware.toHumidity_2_0(nbuf, 93, 1)
self._HumidityIndoorMinMax._Min._IsError = (self._HumidityIndoorMinMax._Min._Value == CWeatherTraits.HumidityNP())
self._HumidityIndoorMinMax._Min._IsOverflow = (self._HumidityIndoorMinMax._Min._Value == CWeatherTraits.HumidityOFL())
self._HumidityIndoorMinMax._Max._IsError = (self._HumidityIndoorMinMax._Max._Value == CWeatherTraits.HumidityNP())
self._HumidityIndoorMinMax._Max._IsOverflow = (self._HumidityIndoorMinMax._Max._Value == CWeatherTraits.HumidityOFL())
self._HumidityIndoorMinMax._Max._Time = None if self._HumidityIndoorMinMax._Max._IsError or self._HumidityIndoorMinMax._Max._IsOverflow else USBHardware.toDateTime(nbuf, 81, 1, 'HumidityIndoorMax')
self._HumidityIndoorMinMax._Min._Time = None if self._HumidityIndoorMinMax._Min._IsError or self._HumidityIndoorMinMax._Min._IsOverflow else USBHardware.toDateTime(nbuf, 86, 1, 'HumidityIndoorMin')
self._HumidityOutdoorMinMax._Max._Value = USBHardware.toHumidity_2_0(nbuf, 104, 1)
self._HumidityOutdoorMinMax._Min._Value = USBHardware.toHumidity_2_0(nbuf, 105, 1)
self._HumidityOutdoor = USBHardware.toHumidity_2_0(nbuf, 106, 1)
self._HumidityOutdoorMinMax._Min._IsError = (self._HumidityOutdoorMinMax._Min._Value == CWeatherTraits.HumidityNP())
self._HumidityOutdoorMinMax._Min._IsOverflow = (self._HumidityOutdoorMinMax._Min._Value == CWeatherTraits.HumidityOFL())
self._HumidityOutdoorMinMax._Max._IsError = (self._HumidityOutdoorMinMax._Max._Value == CWeatherTraits.HumidityNP())
self._HumidityOutdoorMinMax._Max._IsOverflow = (self._HumidityOutdoorMinMax._Max._Value == CWeatherTraits.HumidityOFL())
self._HumidityOutdoorMinMax._Max._Time = None if self._HumidityOutdoorMinMax._Max._IsError or self._HumidityOutdoorMinMax._Max._IsOverflow else USBHardware.toDateTime(nbuf, 94, 1, 'HumidityOutdoorMax')
self._HumidityOutdoorMinMax._Min._Time = None if self._HumidityOutdoorMinMax._Min._IsError or self._HumidityOutdoorMinMax._Min._IsOverflow else USBHardware.toDateTime(nbuf, 99, 1, 'HumidityOutdoorMin')
self._RainLastMonthMax._Max._Time = USBHardware.toDateTime(nbuf, 107, 1, 'RainLastMonthMax')
self._RainLastMonthMax._Max._Value = USBHardware.toRain_6_2(nbuf, 112, 1)
self._RainLastMonth = USBHardware.toRain_6_2(nbuf, 115, 1)
self._RainLastWeekMax._Max._Time = USBHardware.toDateTime(nbuf, 118, 1, 'RainLastWeekMax')
self._RainLastWeekMax._Max._Value = USBHardware.toRain_6_2(nbuf, 123, 1)
self._RainLastWeek = USBHardware.toRain_6_2(nbuf, 126, 1)
self._Rain24HMax._Max._Time = USBHardware.toDateTime(nbuf, 129, 1, 'Rain24HMax')
self._Rain24HMax._Max._Value = USBHardware.toRain_6_2(nbuf, 134, 1)
self._Rain24H = USBHardware.toRain_6_2(nbuf, 137, 1)
self._Rain1HMax._Max._Time = USBHardware.toDateTime(nbuf, 140, 1, 'Rain1HMax')
self._Rain1HMax._Max._Value = USBHardware.toRain_6_2(nbuf, 145, 1)
self._Rain1H = USBHardware.toRain_6_2(nbuf, 148, 1)
self._LastRainReset = USBHardware.toDateTime(nbuf, 151, 0, 'LastRainReset')
self._RainTotal = USBHardware.toRain_7_3(nbuf, 156, 0)
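        # Bytes 160..162 apparently pack six 4-bit wind direction codes, two
        # per byte: w is the current direction and w1..w5 are the five
        # previous readings; readWindDirectionShared returns the two nibbles
        # of one byte as a pair.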
(w ,w1) = USBHardware.readWindDirectionShared(nbuf, 162)
(w2,w3) = USBHardware.readWindDirectionShared(nbuf, 161)
(w4,w5) = USBHardware.readWindDirectionShared(nbuf, 160)
self._WindDirection = w
self._WindDirection1 = w1
self._WindDirection2 = w2
self._WindDirection3 = w3
self._WindDirection4 = w4
self._WindDirection5 = w5
if DEBUG_WEATHER_DATA > 2:
unknownbuf = [0]*9
for i in xrange(0,9):
                unknownbuf[i] = nbuf[0][163+i]
strbuf = ""
for i in unknownbuf:
strbuf += str("%.2x " % i)
            logdbg('Bytes with unknown meaning at 157-165 (buf[163..171]): %s' % strbuf)
self._WindSpeed = USBHardware.toWindspeed_6_2(nbuf, 172)
# FIXME: read the WindErrFlags
(g ,g1) = USBHardware.readWindDirectionShared(nbuf, 177)
(g2,g3) = USBHardware.readWindDirectionShared(nbuf, 176)
(g4,g5) = USBHardware.readWindDirectionShared(nbuf, 175)
self._GustDirection = g
self._GustDirection1 = g1
self._GustDirection2 = g2
self._GustDirection3 = g3
self._GustDirection4 = g4
self._GustDirection5 = g5
self._GustMax._Max._Value = USBHardware.toWindspeed_6_2(nbuf, 184)
self._GustMax._Max._IsError = (self._GustMax._Max._Value == CWeatherTraits.WindNP())
self._GustMax._Max._IsOverflow = (self._GustMax._Max._Value == CWeatherTraits.WindOFL())
self._GustMax._Max._Time = None if self._GustMax._Max._IsError or self._GustMax._Max._IsOverflow else USBHardware.toDateTime(nbuf, 179, 1, 'GustMax')
self._Gust = USBHardware.toWindspeed_6_2(nbuf, 187)
# Apparently the station returns only ONE date time for both hPa/inHg
# Min Time Reset and Max Time Reset
self._PressureRelative_hPaMinMax._Max._Time = USBHardware.toDateTime(nbuf, 190, 1, 'PressureRelative_hPaMax')
self._PressureRelative_inHgMinMax._Max._Time = self._PressureRelative_hPaMinMax._Max._Time
self._PressureRelative_hPaMinMax._Min._Time = self._PressureRelative_hPaMinMax._Max._Time # firmware bug, should be: USBHardware.toDateTime(nbuf, 195, 1)
self._PressureRelative_inHgMinMax._Min._Time = self._PressureRelative_hPaMinMax._Min._Time
(self._PresRel_hPa_Max, self._PresRel_inHg_Max) = USBHardware.readPressureShared(nbuf, 195, 1) # firmware bug, should be: self._PressureRelative_hPaMinMax._Min._Time
(self._PressureRelative_hPaMinMax._Max._Value, self._PressureRelative_inHgMinMax._Max._Value) = USBHardware.readPressureShared(nbuf, 200, 1)
(self._PressureRelative_hPaMinMax._Min._Value, self._PressureRelative_inHgMinMax._Min._Value) = USBHardware.readPressureShared(nbuf, 205, 1)
(self._PressureRelative_hPa, self._PressureRelative_inHg) = USBHardware.readPressureShared(nbuf, 210, 1)
def toLog(self):
logdbg("_WeatherState=%s _WeatherTendency=%s _AlarmRingingFlags %04x" % (CWeatherTraits.forecastMap[self._WeatherState], CWeatherTraits.trendMap[self._WeatherTendency], self._AlarmRingingFlags))
logdbg("_TempIndoor= %8.3f _Min=%8.3f (%s) _Max=%8.3f (%s)" % (self._TempIndoor, self._TempIndoorMinMax._Min._Value, self._TempIndoorMinMax._Min._Time, self._TempIndoorMinMax._Max._Value, self._TempIndoorMinMax._Max._Time))
logdbg("_HumidityIndoor= %8.3f _Min=%8.3f (%s) _Max=%8.3f (%s)" % (self._HumidityIndoor, self._HumidityIndoorMinMax._Min._Value, self._HumidityIndoorMinMax._Min._Time, self._HumidityIndoorMinMax._Max._Value, self._HumidityIndoorMinMax._Max._Time))
logdbg("_TempOutdoor= %8.3f _Min=%8.3f (%s) _Max=%8.3f (%s)" % (self._TempOutdoor, self._TempOutdoorMinMax._Min._Value, self._TempOutdoorMinMax._Min._Time, self._TempOutdoorMinMax._Max._Value, self._TempOutdoorMinMax._Max._Time))
logdbg("_HumidityOutdoor=%8.3f _Min=%8.3f (%s) _Max=%8.3f (%s)" % (self._HumidityOutdoor, self._HumidityOutdoorMinMax._Min._Value, self._HumidityOutdoorMinMax._Min._Time, self._HumidityOutdoorMinMax._Max._Value, self._HumidityOutdoorMinMax._Max._Time))
logdbg("_Windchill= %8.3f _Min=%8.3f (%s) _Max=%8.3f (%s)" % (self._Windchill, self._WindchillMinMax._Min._Value, self._WindchillMinMax._Min._Time, self._WindchillMinMax._Max._Value, self._WindchillMinMax._Max._Time))
logdbg("_Dewpoint= %8.3f _Min=%8.3f (%s) _Max=%8.3f (%s)" % (self._Dewpoint, self._DewpointMinMax._Min._Value, self._DewpointMinMax._Min._Time, self._DewpointMinMax._Max._Value, self._DewpointMinMax._Max._Time))
logdbg("_WindSpeed= %8.3f" % self._WindSpeed)
logdbg("_Gust= %8.3f _Max=%8.3f (%s)" % (self._Gust, self._GustMax._Max._Value, self._GustMax._Max._Time))
logdbg('_WindDirection= %3s _GustDirection= %3s' % (CWeatherTraits.windDirMap[self._WindDirection], CWeatherTraits.windDirMap[self._GustDirection]))
logdbg('_WindDirection1= %3s _GustDirection1= %3s' % (CWeatherTraits.windDirMap[self._WindDirection1], CWeatherTraits.windDirMap[self._GustDirection1]))
logdbg('_WindDirection2= %3s _GustDirection2= %3s' % (CWeatherTraits.windDirMap[self._WindDirection2], CWeatherTraits.windDirMap[self._GustDirection2]))
logdbg('_WindDirection3= %3s _GustDirection3= %3s' % (CWeatherTraits.windDirMap[self._WindDirection3], CWeatherTraits.windDirMap[self._GustDirection3]))
logdbg('_WindDirection4= %3s _GustDirection4= %3s' % (CWeatherTraits.windDirMap[self._WindDirection4], CWeatherTraits.windDirMap[self._GustDirection4]))
logdbg('_WindDirection5= %3s _GustDirection5= %3s' % (CWeatherTraits.windDirMap[self._WindDirection5], CWeatherTraits.windDirMap[self._GustDirection5]))
if (self._RainLastMonth > 0) or (self._RainLastWeek > 0):
logdbg("_RainLastMonth= %8.3f _Max=%8.3f (%s)" % (self._RainLastMonth, self._RainLastMonthMax._Max._Value, self._RainLastMonthMax._Max._Time))
logdbg("_RainLastWeek= %8.3f _Max=%8.3f (%s)" % (self._RainLastWeek, self._RainLastWeekMax._Max._Value, self._RainLastWeekMax._Max._Time))
logdbg("_Rain24H= %8.3f _Max=%8.3f (%s)" % (self._Rain24H, self._Rain24HMax._Max._Value, self._Rain24HMax._Max._Time))
logdbg("_Rain1H= %8.3f _Max=%8.3f (%s)" % (self._Rain1H, self._Rain1HMax._Max._Value, self._Rain1HMax._Max._Time))
logdbg("_RainTotal= %8.3f _LastRainReset= (%s)" % (self._RainTotal, self._LastRainReset))
logdbg("PressureRel_hPa= %8.3f _Min=%8.3f (%s) _Max=%8.3f (%s) " % (self._PressureRelative_hPa, self._PressureRelative_hPaMinMax._Min._Value, self._PressureRelative_hPaMinMax._Min._Time, self._PressureRelative_hPaMinMax._Max._Value, self._PressureRelative_hPaMinMax._Max._Time))
logdbg("PressureRel_inHg=%8.3f _Min=%8.3f (%s) _Max=%8.3f (%s) " % (self._PressureRelative_inHg, self._PressureRelative_inHgMinMax._Min._Value, self._PressureRelative_inHgMinMax._Min._Time, self._PressureRelative_inHgMinMax._Max._Value, self._PressureRelative_inHgMinMax._Max._Time))
###logdbg('(* Bug in Weather Station: PressureRelative._Min._Time is written to location of _PressureRelative._Max._Time')
###logdbg('Instead of PressureRelative._Min._Time we get: _PresRel_hPa_Max= %8.3f, _PresRel_inHg_max =%8.3f;' % (self._PresRel_hPa_Max, self._PresRel_inHg_Max))
class CWeatherStationConfig(object):
def __init__(self):
self._InBufCS = 0 # checksum of received config
self._OutBufCS = 0 # calculated config checksum from outbuf config
self._ClockMode = 0
self._TemperatureFormat = 0
self._PressureFormat = 0
self._RainFormat = 0
self._WindspeedFormat = 0
self._WeatherThreshold = 0
self._StormThreshold = 0
self._LCDContrast = 0
self._LowBatFlags = 0
self._WindDirAlarmFlags = 0
self._OtherAlarmFlags = 0
self._ResetMinMaxFlags = 0 # output only
self._HistoryInterval = 0
self._TempIndoorMinMax = CMinMaxMeasurement()
self._TempOutdoorMinMax = CMinMaxMeasurement()
self._HumidityIndoorMinMax = CMinMaxMeasurement()
self._HumidityOutdoorMinMax = CMinMaxMeasurement()
self._Rain24HMax = CMinMaxMeasurement()
self._GustMax = CMinMaxMeasurement()
self._PressureRelative_hPaMinMax = CMinMaxMeasurement()
self._PressureRelative_inHgMinMax = CMinMaxMeasurement()
def setTemps(self,TempFormat,InTempLo,InTempHi,OutTempLo,OutTempHi):
f1 = TempFormat
t1 = InTempLo
t2 = InTempHi
t3 = OutTempLo
t4 = OutTempHi
if f1 not in [ETemperatureFormat.tfFahrenheit,
ETemperatureFormat.tfCelsius]:
logerr('setTemps: unknown temperature format %s' % TempFormat)
return 0
if t1 < -40.0 or t1 > 59.9 or t2 < -40.0 or t2 > 59.9 or \
t3 < -40.0 or t3 > 59.9 or t4 < -40.0 or t4 > 59.9:
logerr('setTemps: one or more values out of range')
return 0
self._TemperatureFormat = f1
self._TempIndoorMinMax._Min._Value = t1
self._TempIndoorMinMax._Max._Value = t2
self._TempOutdoorMinMax._Min._Value = t3
self._TempOutdoorMinMax._Max._Value = t4
return 1
def setHums(self,InHumLo,InHumHi,OutHumLo,OutHumHi):
h1 = InHumLo
h2 = InHumHi
h3 = OutHumLo
h4 = OutHumHi
if h1 < 1 or h1 > 99 or h2 < 1 or h2 > 99 or \
h3 < 1 or h3 > 99 or h4 < 1 or h4 > 99:
logerr('setHums: one or more values out of range')
return 0
self._HumidityIndoorMinMax._Min._Value = h1
self._HumidityIndoorMinMax._Max._Value = h2
self._HumidityOutdoorMinMax._Min._Value = h3
self._HumidityOutdoorMinMax._Max._Value = h4
return 1
def setRain24H(self,RainFormat,Rain24hHi):
f1 = RainFormat
r1 = Rain24hHi
if f1 not in [ERainFormat.rfMm, ERainFormat.rfInch]:
logerr('setRain24: unknown format %s' % RainFormat)
return 0
if r1 < 0.0 or r1 > 9999.9:
logerr('setRain24: value outside range')
return 0
self._RainFormat = f1
self._Rain24HMax._Max._Value = r1
return 1
def setGust(self,WindSpeedFormat,GustHi):
        # When the units of the max gust alarm are changed on the weather
        # station itself, the station automatically converts the value to
        # the new unit and rounds it to a whole number; weewx receives the
        # value converted to km/h.
        #
        # It is too much trouble to work out exactly what the station's
        # internal conversion algorithms are for the other wind units.
        #
        # Setting a value in km/h is tested and works, so that is the only
        # option available here.
f1 = WindSpeedFormat
g1 = GustHi
if f1 < EWindspeedFormat.wfMs or f1 > EWindspeedFormat.wfMph:
logerr('setGust: unknown format %s' % WindSpeedFormat)
return 0
if f1 != EWindspeedFormat.wfKmh:
logerr('setGust: only units of km/h are supported')
return 0
if g1 < 0.0 or g1 > 180.0:
logerr('setGust: value outside range')
return 0
        self._WindspeedFormat = f1
self._GustMax._Max._Value = int(g1) # apparently gust value is always an integer
return 1
def setPresRels(self,PressureFormat,PresRelhPaLo,PresRelhPaHi,PresRelinHgLo,PresRelinHgHi):
f1 = PressureFormat
p1 = PresRelhPaLo
p2 = PresRelhPaHi
p3 = PresRelinHgLo
p4 = PresRelinHgHi
if f1 not in [EPressureFormat.pfinHg, EPressureFormat.pfHPa]:
logerr('setPresRel: unknown format %s' % PressureFormat)
return 0
if p1 < 920.0 or p1 > 1080.0 or p2 < 920.0 or p2 > 1080.0 or \
p3 < 27.10 or p3 > 31.90 or p4 < 27.10 or p4 > 31.90:
logerr('setPresRel: value outside range')
return 0
        self._PressureFormat = f1
self._PressureRelative_hPaMinMax._Min._Value = p1
self._PressureRelative_hPaMinMax._Max._Value = p2
self._PressureRelative_inHgMinMax._Min._Value = p3
self._PressureRelative_inHgMinMax._Max._Value = p4
return 1
def getOutBufCS(self):
return self._OutBufCS
def getInBufCS(self):
return self._InBufCS
def setResetMinMaxFlags(self, resetMinMaxFlags):
logdbg('setResetMinMaxFlags: %s' % resetMinMaxFlags)
self._ResetMinMaxFlags = resetMinMaxFlags
def parseRain_3(self, number, buf, start, StartOnHiNibble, numbytes):
        '''Encode a 7-digit number with 3 decimals into the buffer (BCD)'''
num = int(number*1000)
parsebuf=[0]*7
for i in xrange(7-numbytes,7):
parsebuf[i] = num%10
num = num//10
if StartOnHiNibble:
buf[0][0+start] = parsebuf[6]*16 + parsebuf[5]
buf[0][1+start] = parsebuf[4]*16 + parsebuf[3]
buf[0][2+start] = parsebuf[2]*16 + parsebuf[1]
buf[0][3+start] = parsebuf[0]*16 + (buf[0][3+start] & 0xF)
else:
buf[0][0+start] = (buf[0][0+start] & 0xF0) + parsebuf[6]
buf[0][1+start] = parsebuf[5]*16 + parsebuf[4]
buf[0][2+start] = parsebuf[3]*16 + parsebuf[2]
buf[0][3+start] = parsebuf[1]*16 + parsebuf[0]
def parseWind_6(self, number, buf, start):
        '''Encode a float into 6 nibbles (3 bytes), scaled by 100*256'''
num = int(number*100*256)
parsebuf=[0]*6
for i in xrange(0,6):
parsebuf[i] = num%16
num = num//16
buf[0][0+start] = parsebuf[5]*16 + parsebuf[4]
buf[0][1+start] = parsebuf[3]*16 + parsebuf[2]
buf[0][2+start] = parsebuf[1]*16 + parsebuf[0]
def parse_0(self, number, buf, start, StartOnHiNibble, numbytes):
        '''Encode a 5-digit number with 0 decimals into the buffer (BCD)'''
num = int(number)
nbuf=[0]*5
for i in xrange(5-numbytes,5):
nbuf[i] = num%10
num = num//10
if StartOnHiNibble:
buf[0][0+start] = nbuf[4]*16 + nbuf[3]
buf[0][1+start] = nbuf[2]*16 + nbuf[1]
buf[0][2+start] = nbuf[0]*16 + (buf[0][2+start] & 0x0F)
else:
buf[0][0+start] = (buf[0][0+start] & 0xF0) + nbuf[4]
buf[0][1+start] = nbuf[3]*16 + nbuf[2]
buf[0][2+start] = nbuf[1]*16 + nbuf[0]
def parse_1(self, number, buf, start, StartOnHiNibble, numbytes):
        '''Encode a 5-digit number with 1 decimal into the buffer (BCD)'''
self.parse_0(number*10.0, buf, start, StartOnHiNibble, numbytes)
def parse_2(self, number, buf, start, StartOnHiNibble, numbytes):
        '''Encode a 5-digit number with 2 decimals into the buffer (BCD)'''
self.parse_0(number*100.0, buf, start, StartOnHiNibble, numbytes)
def parse_3(self, number, buf, start, StartOnHiNibble, numbytes):
        '''Encode a 5-digit number with 3 decimals into the buffer (BCD)'''
self.parse_0(number*1000.0, buf, start, StartOnHiNibble, numbytes)
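    # Worked example (illustrative): parse_0(41, buf, 25, 1, 2) splits 41
    # into BCD digits and writes 0x41 into buf[0][25]; the higher digit
    # positions of the 5-digit field are zero-filled and the unused trailing
    # nibble is preserved. parse_1/parse_2/parse_3 pre-scale the value by
    # 10/100/1000 and delegate to parse_0.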
def read(self,buf):
nbuf=[0]
nbuf[0]=buf[0]
self._WindspeedFormat = (nbuf[0][4] >> 4) & 0xF
self._RainFormat = (nbuf[0][4] >> 3) & 1
self._PressureFormat = (nbuf[0][4] >> 2) & 1
self._TemperatureFormat = (nbuf[0][4] >> 1) & 1
self._ClockMode = nbuf[0][4] & 1
self._StormThreshold = (nbuf[0][5] >> 4) & 0xF
self._WeatherThreshold = nbuf[0][5] & 0xF
self._LowBatFlags = (nbuf[0][6] >> 4) & 0xF
self._LCDContrast = nbuf[0][6] & 0xF
self._WindDirAlarmFlags = (nbuf[0][7] << 8) | nbuf[0][8]
self._OtherAlarmFlags = (nbuf[0][9] << 8) | nbuf[0][10]
self._TempIndoorMinMax._Max._Value = USBHardware.toTemperature_5_3(nbuf, 11, 1)
self._TempIndoorMinMax._Min._Value = USBHardware.toTemperature_5_3(nbuf, 13, 0)
self._TempOutdoorMinMax._Max._Value = USBHardware.toTemperature_5_3(nbuf, 16, 1)
self._TempOutdoorMinMax._Min._Value = USBHardware.toTemperature_5_3(nbuf, 18, 0)
self._HumidityIndoorMinMax._Max._Value = USBHardware.toHumidity_2_0(nbuf, 21, 1)
self._HumidityIndoorMinMax._Min._Value = USBHardware.toHumidity_2_0(nbuf, 22, 1)
self._HumidityOutdoorMinMax._Max._Value = USBHardware.toHumidity_2_0(nbuf, 23, 1)
self._HumidityOutdoorMinMax._Min._Value = USBHardware.toHumidity_2_0(nbuf, 24, 1)
self._Rain24HMax._Max._Value = USBHardware.toRain_7_3(nbuf, 25, 0)
self._HistoryInterval = nbuf[0][29]
self._GustMax._Max._Value = USBHardware.toWindspeed_6_2(nbuf, 30)
(self._PressureRelative_hPaMinMax._Min._Value, self._PressureRelative_inHgMinMax._Min._Value) = USBHardware.readPressureShared(nbuf, 33, 1)
(self._PressureRelative_hPaMinMax._Max._Value, self._PressureRelative_inHgMinMax._Max._Value) = USBHardware.readPressureShared(nbuf, 38, 1)
self._ResetMinMaxFlags = (nbuf[0][43]) <<16 | (nbuf[0][44] << 8) | (nbuf[0][45])
self._InBufCS = (nbuf[0][46] << 8) | nbuf[0][47]
self._OutBufCS = calc_checksum(buf, 4, end=39) + 7
"""
Reset DewpointMax 80 00 00
Reset DewpointMin 40 00 00
not used 20 00 00
Reset WindchillMin* 10 00 00 *dateTime only; Min._Value is preserved
Reset TempOutMax 08 00 00
Reset TempOutMin 04 00 00
Reset TempInMax 02 00 00
Reset TempInMin 01 00 00
Reset Gust 00 80 00
not used 00 40 00
not used 00 20 00
not used 00 10 00
Reset HumOutMax 00 08 00
Reset HumOutMin 00 04 00
Reset HumInMax 00 02 00
Reset HumInMin 00 01 00
not used 00 00 80
Reset Rain Total 00 00 40
Reset last month? 00 00 20
Reset last week? 00 00 10
Reset Rain24H 00 00 08
Reset Rain1H 00 00 04
Reset PresRelMax 00 00 02
Reset PresRelMin 00 00 01
"""
#self._ResetMinMaxFlags = 0x000000
#logdbg('set _ResetMinMaxFlags to %06x' % self._ResetMinMaxFlags)
"""
setTemps(self,TempFormat,InTempLo,InTempHi,OutTempLo,OutTempHi)
setHums(self,InHumLo,InHumHi,OutHumLo,OutHumHi)
setPresRels(self,PressureFormat,PresRelhPaLo,PresRelhPaHi,PresRelinHgLo,PresRelinHgHi)
setGust(self,WindSpeedFormat,GustHi)
setRain24H(self,RainFormat,Rain24hHi)
"""
# Examples:
#self.setTemps(ETemperatureFormat.tfCelsius,1.0,41.0,2.0,42.0)
#self.setHums(41,71,42,72)
#self.setPresRels(EPressureFormat.pfHPa,960.1,1040.1,28.36,30.72)
#self.setGust(EWindspeedFormat.wfKmh,040.0)
#self.setRain24H(ERainFormat.rfMm,50.0)
# Set historyInterval to 5 minutes (default: 2 hours)
self._HistoryInterval = EHistoryInterval.hi05Min
# Clear all alarm flags, otherwise the datastream from the weather
# station will pause during an alarm and connection will be lost.
self._WindDirAlarmFlags = 0x0000
self._OtherAlarmFlags = 0x0000
def testConfigChanged(self,buf):
nbuf = [0]
nbuf[0] = buf[0]
nbuf[0][0] = 16*(self._WindspeedFormat & 0xF) + 8*(self._RainFormat & 1) + 4*(self._PressureFormat & 1) + 2*(self._TemperatureFormat & 1) + (self._ClockMode & 1)
        nbuf[0][1] = (self._WeatherThreshold & 0xF) | ((16 * self._StormThreshold) & 0xF0)
        nbuf[0][2] = (self._LCDContrast & 0xF) | ((16 * self._LowBatFlags) & 0xF0)
nbuf[0][3] = (self._OtherAlarmFlags >> 0) & 0xFF
nbuf[0][4] = (self._OtherAlarmFlags >> 8) & 0xFF
nbuf[0][5] = (self._WindDirAlarmFlags >> 0) & 0xFF
nbuf[0][6] = (self._WindDirAlarmFlags >> 8) & 0xFF
# reverse buf from here
self.parse_2(self._PressureRelative_inHgMinMax._Max._Value, nbuf, 7, 1, 5)
self.parse_1(self._PressureRelative_hPaMinMax._Max._Value, nbuf, 9, 0, 5)
self.parse_2(self._PressureRelative_inHgMinMax._Min._Value, nbuf, 12, 1, 5)
self.parse_1(self._PressureRelative_hPaMinMax._Min._Value, nbuf, 14, 0, 5)
self.parseWind_6(self._GustMax._Max._Value, nbuf, 17)
nbuf[0][20] = self._HistoryInterval & 0xF
self.parseRain_3(self._Rain24HMax._Max._Value, nbuf, 21, 0, 7)
self.parse_0(self._HumidityOutdoorMinMax._Max._Value, nbuf, 25, 1, 2)
self.parse_0(self._HumidityOutdoorMinMax._Min._Value, nbuf, 26, 1, 2)
self.parse_0(self._HumidityIndoorMinMax._Max._Value, nbuf, 27, 1, 2)
self.parse_0(self._HumidityIndoorMinMax._Min._Value, nbuf, 28, 1, 2)
self.parse_3(self._TempOutdoorMinMax._Max._Value + CWeatherTraits.TemperatureOffset(), nbuf, 29, 1, 5)
self.parse_3(self._TempOutdoorMinMax._Min._Value + CWeatherTraits.TemperatureOffset(), nbuf, 31, 0, 5)
self.parse_3(self._TempIndoorMinMax._Max._Value + CWeatherTraits.TemperatureOffset(), nbuf, 34, 1, 5)
self.parse_3(self._TempIndoorMinMax._Min._Value + CWeatherTraits.TemperatureOffset(), nbuf, 36, 0, 5)
# reverse buf to here
USBHardware.reverseByteOrder(nbuf, 7, 32)
# do not include the ResetMinMaxFlags bytes when calculating checksum
nbuf[0][39] = (self._ResetMinMaxFlags >> 16) & 0xFF
nbuf[0][40] = (self._ResetMinMaxFlags >> 8) & 0xFF
nbuf[0][41] = (self._ResetMinMaxFlags >> 0) & 0xFF
self._OutBufCS = calc_checksum(nbuf, 0, end=39) + 7
nbuf[0][42] = (self._OutBufCS >> 8) & 0xFF
nbuf[0][43] = (self._OutBufCS >> 0) & 0xFF
buf[0] = nbuf[0]
if self._OutBufCS == self._InBufCS and self._ResetMinMaxFlags == 0:
if DEBUG_CONFIG_DATA > 2:
logdbg('testConfigChanged: checksum not changed: OutBufCS=%04x' % self._OutBufCS)
changed = 0
else:
if DEBUG_CONFIG_DATA > 0:
logdbg('testConfigChanged: checksum or resetMinMaxFlags changed: OutBufCS=%04x InBufCS=%04x _ResetMinMaxFlags=%06x' % (self._OutBufCS, self._InBufCS, self._ResetMinMaxFlags))
if DEBUG_CONFIG_DATA > 1:
self.toLog()
changed = 1
return changed
def toLog(self):
logdbg('OutBufCS= %04x' % self._OutBufCS)
logdbg('InBufCS= %04x' % self._InBufCS)
logdbg('ClockMode= %s' % self._ClockMode)
logdbg('TemperatureFormat= %s' % self._TemperatureFormat)
logdbg('PressureFormat= %s' % self._PressureFormat)
logdbg('RainFormat= %s' % self._RainFormat)
logdbg('WindspeedFormat= %s' % self._WindspeedFormat)
logdbg('WeatherThreshold= %s' % self._WeatherThreshold)
logdbg('StormThreshold= %s' % self._StormThreshold)
logdbg('LCDContrast= %s' % self._LCDContrast)
logdbg('LowBatFlags= %01x' % self._LowBatFlags)
logdbg('WindDirAlarmFlags= %04x' % self._WindDirAlarmFlags)
logdbg('OtherAlarmFlags= %04x' % self._OtherAlarmFlags)
logdbg('HistoryInterval= %s' % self._HistoryInterval)
logdbg('TempIndoor_Min= %s' % self._TempIndoorMinMax._Min._Value)
logdbg('TempIndoor_Max= %s' % self._TempIndoorMinMax._Max._Value)
logdbg('TempOutdoor_Min= %s' % self._TempOutdoorMinMax._Min._Value)
logdbg('TempOutdoor_Max= %s' % self._TempOutdoorMinMax._Max._Value)
logdbg('HumidityIndoor_Min= %s' % self._HumidityIndoorMinMax._Min._Value)
logdbg('HumidityIndoor_Max= %s' % self._HumidityIndoorMinMax._Max._Value)
logdbg('HumidityOutdoor_Min= %s' % self._HumidityOutdoorMinMax._Min._Value)
logdbg('HumidityOutdoor_Max= %s' % self._HumidityOutdoorMinMax._Max._Value)
logdbg('Rain24HMax= %s' % self._Rain24HMax._Max._Value)
logdbg('GustMax= %s' % self._GustMax._Max._Value)
logdbg('PressureRel_hPa_Min= %s' % self._PressureRelative_hPaMinMax._Min._Value)
logdbg('PressureRel_inHg_Min= %s' % self._PressureRelative_inHgMinMax._Min._Value)
logdbg('PressureRel_hPa_Max= %s' % self._PressureRelative_hPaMinMax._Max._Value)
logdbg('PressureRel_inHg_Max= %s' % self._PressureRelative_inHgMinMax._Max._Value)
logdbg('ResetMinMaxFlags= %06x (Output only)' % self._ResetMinMaxFlags)
def asDict(self):
return {
'checksum_in': self._InBufCS,
'checksum_out': self._OutBufCS,
'format_clock': self._ClockMode,
'format_temperature': self._TemperatureFormat,
'format_pressure': self._PressureFormat,
'format_rain': self._RainFormat,
'format_windspeed': self._WindspeedFormat,
'threshold_weather': self._WeatherThreshold,
'threshold_storm': self._StormThreshold,
'lcd_contrast': self._LCDContrast,
'low_battery_flags': self._LowBatFlags,
'alarm_flags_wind_dir': self._WindDirAlarmFlags,
'alarm_flags_other': self._OtherAlarmFlags,
# 'reset_minmax_flags': self._ResetMinMaxFlags,
'history_interval': self._HistoryInterval,
'indoor_temp_min': self._TempIndoorMinMax._Min._Value,
'indoor_temp_min_time': self._TempIndoorMinMax._Min._Time,
'indoor_temp_max': self._TempIndoorMinMax._Max._Value,
'indoor_temp_max_time': self._TempIndoorMinMax._Max._Time,
'indoor_humidity_min': self._HumidityIndoorMinMax._Min._Value,
'indoor_humidity_min_time': self._HumidityIndoorMinMax._Min._Time,
'indoor_humidity_max': self._HumidityIndoorMinMax._Max._Value,
'indoor_humidity_max_time': self._HumidityIndoorMinMax._Max._Time,
'outdoor_temp_min': self._TempOutdoorMinMax._Min._Value,
'outdoor_temp_min_time': self._TempOutdoorMinMax._Min._Time,
'outdoor_temp_max': self._TempOutdoorMinMax._Max._Value,
'outdoor_temp_max_time': self._TempOutdoorMinMax._Max._Time,
'outdoor_humidity_min': self._HumidityOutdoorMinMax._Min._Value,
'outdoor_humidity_min_time':self._HumidityOutdoorMinMax._Min._Time,
'outdoor_humidity_max': self._HumidityOutdoorMinMax._Max._Value,
'outdoor_humidity_max_time':self._HumidityOutdoorMinMax._Max._Time,
'rain_24h_max': self._Rain24HMax._Max._Value,
'rain_24h_max_time': self._Rain24HMax._Max._Time,
'wind_gust_max': self._GustMax._Max._Value,
'wind_gust_max_time': self._GustMax._Max._Time,
'pressure_min': self._PressureRelative_hPaMinMax._Min._Value,
'pressure_min_time': self._PressureRelative_hPaMinMax._Min._Time,
'pressure_max': self._PressureRelative_hPaMinMax._Max._Value,
'pressure_max_time': self._PressureRelative_hPaMinMax._Max._Time
# do not bother with pressure inHg
}
class CHistoryData(object):
def __init__(self):
self.Time = None
self.TempIndoor = CWeatherTraits.TemperatureNP()
self.HumidityIndoor = CWeatherTraits.HumidityNP()
self.TempOutdoor = CWeatherTraits.TemperatureNP()
self.HumidityOutdoor = CWeatherTraits.HumidityNP()
self.PressureRelative = None
self.RainCounterRaw = 0
self.WindSpeed = CWeatherTraits.WindNP()
self.WindDirection = EWindDirection.wdNone
self.Gust = CWeatherTraits.WindNP()
self.GustDirection = EWindDirection.wdNone
def read(self, buf):
nbuf = [0]
nbuf[0] = buf[0]
self.Gust = USBHardware.toWindspeed_3_1(nbuf, 12, 0)
self.GustDirection = (nbuf[0][14] >> 4) & 0xF
self.WindSpeed = USBHardware.toWindspeed_3_1(nbuf, 14, 0)
self.WindDirection = (nbuf[0][14] >> 4) & 0xF
self.RainCounterRaw = USBHardware.toRain_3_1(nbuf, 16, 1)
self.HumidityOutdoor = USBHardware.toHumidity_2_0(nbuf, 17, 0)
self.HumidityIndoor = USBHardware.toHumidity_2_0(nbuf, 18, 0)
self.PressureRelative = USBHardware.toPressure_hPa_5_1(nbuf, 19, 0)
self.TempIndoor = USBHardware.toTemperature_3_1(nbuf, 23, 0)
self.TempOutdoor = USBHardware.toTemperature_3_1(nbuf, 22, 1)
self.Time = USBHardware.toDateTime(nbuf, 25, 1, 'HistoryData')
def toLog(self):
"""emit raw historical data"""
logdbg("Time %s" % self.Time)
logdbg("TempIndoor= %7.1f" % self.TempIndoor)
logdbg("HumidityIndoor= %7.0f" % self.HumidityIndoor)
logdbg("TempOutdoor= %7.1f" % self.TempOutdoor)
logdbg("HumidityOutdoor= %7.0f" % self.HumidityOutdoor)
logdbg("PressureRelative= %7.1f" % self.PressureRelative)
logdbg("RainCounterRaw= %7.3f" % self.RainCounterRaw)
logdbg("WindSpeed= %7.3f" % self.WindSpeed)
logdbg("WindDirection= % 3s" % CWeatherTraits.windDirMap[self.WindDirection])
logdbg("Gust= %7.3f" % self.Gust)
logdbg("GustDirection= % 3s" % CWeatherTraits.windDirMap[self.GustDirection])
def asDict(self):
"""emit historical data as a dict with weewx conventions"""
return {
'dateTime': tstr_to_ts(str(self.Time)),
'inTemp': self.TempIndoor,
'inHumidity': self.HumidityIndoor,
'outTemp': self.TempOutdoor,
'outHumidity': self.HumidityOutdoor,
'pressure': self.PressureRelative,
'rain': self.RainCounterRaw / 10, # weewx wants cm
'windSpeed': self.WindSpeed,
'windDir': getWindDir(self.WindDirection, self.WindSpeed),
'windGust': self.Gust,
'windGustDir': getWindDir(self.GustDirection, self.Gust),
}
class HistoryCache(object):
def __init__(self):
self.clear_records()
def clear_records(self):
self.since_ts = 0
self.num_rec = 0
self.start_index = None
self.next_index = None
self.records = []
self.num_outstanding_records = None
self.num_scanned = 0
self.last_ts = 0
class CDataStore(object):
class TTransceiverSettings(object):
def __init__(self):
self.VendorId = 0x6666
self.ProductId = 0x5555
self.VersionNo = 1
self.manufacturer = "LA CROSSE TECHNOLOGY"
self.product = "Weather Direct Light Wireless Device"
self.FrequencyStandard = EFrequency.fsUS
self.Frequency = getFrequency(self.FrequencyStandard)
self.SerialNumber = None
self.DeviceID = None
class TLastStat(object):
def __init__(self):
self.LastBatteryStatus = None
self.LastLinkQuality = None
self.LastHistoryIndex = None
self.LatestHistoryIndex = None
self.last_seen_ts = None
self.last_weather_ts = 0
self.last_history_ts = 0
self.last_config_ts = 0
def __init__(self):
self.transceiverPresent = False
self.commModeInterval = 3
self.registeredDeviceID = None
self.LastStat = CDataStore.TLastStat()
self.TransceiverSettings = CDataStore.TTransceiverSettings()
self.StationConfig = CWeatherStationConfig()
self.CurrentWeather = CCurrentWeatherData()
def getFrequencyStandard(self):
return self.TransceiverSettings.FrequencyStandard
def setFrequencyStandard(self, val):
        logdbg('setFrequencyStandard: %s' % val)
self.TransceiverSettings.FrequencyStandard = val
self.TransceiverSettings.Frequency = getFrequency(val)
def getDeviceID(self):
return self.TransceiverSettings.DeviceID
def setDeviceID(self,val):
logdbg("setDeviceID: %04x" % val)
self.TransceiverSettings.DeviceID = val
def getRegisteredDeviceID(self):
return self.registeredDeviceID
def setRegisteredDeviceID(self, val):
if val != self.registeredDeviceID:
loginf("console is paired to device with ID %04x" % val)
self.registeredDeviceID = val
def getTransceiverPresent(self):
return self.transceiverPresent
def setTransceiverPresent(self, val):
self.transceiverPresent = val
def setLastStatCache(self, seen_ts=None,
quality=None, battery=None,
weather_ts=None,
history_ts=None,
config_ts=None):
if DEBUG_COMM > 1:
logdbg('setLastStatCache: seen=%s quality=%s battery=%s weather=%s history=%s config=%s' %
(seen_ts, quality, battery, weather_ts, history_ts, config_ts))
if seen_ts is not None:
self.LastStat.last_seen_ts = seen_ts
if quality is not None:
self.LastStat.LastLinkQuality = quality
if battery is not None:
self.LastStat.LastBatteryStatus = battery
if weather_ts is not None:
self.LastStat.last_weather_ts = weather_ts
if history_ts is not None:
self.LastStat.last_history_ts = history_ts
if config_ts is not None:
self.LastStat.last_config_ts = config_ts
def setLastHistoryIndex(self,val):
self.LastStat.LastHistoryIndex = val
def getLastHistoryIndex(self):
return self.LastStat.LastHistoryIndex
def setLatestHistoryIndex(self,val):
self.LastStat.LatestHistoryIndex = val
def getLatestHistoryIndex(self):
return self.LastStat.LatestHistoryIndex
def setCurrentWeather(self, data):
self.CurrentWeather = data
def getDeviceRegistered(self):
if ( self.registeredDeviceID is None
or self.TransceiverSettings.DeviceID is None
or self.registeredDeviceID != self.TransceiverSettings.DeviceID ):
return False
return True
def getCommModeInterval(self):
return self.commModeInterval
def setCommModeInterval(self,val):
logdbg("setCommModeInterval to %x" % val)
self.commModeInterval = val
def setTransceiverSerNo(self,val):
logdbg("setTransceiverSerialNumber to %s" % val)
self.TransceiverSettings.SerialNumber = val
def getTransceiverSerNo(self):
return self.TransceiverSettings.SerialNumber
class sHID(object):
"""USB driver abstraction"""
def __init__(self):
self.devh = None
self.timeout = 1000
self.last_dump = None
def open(self, vid, pid, did, serial):
device = self._find_device(vid, pid, did, serial)
if device is None:
logcrt('Cannot find USB device with Vendor=0x%04x ProdID=0x%04x Device=%s Serial=%s' % (vid, pid, did, serial))
raise weewx.WeeWxIOError('Unable to find transceiver on USB')
self._open_device(device)
def close(self):
self._close_device()
def _find_device(self, vid, pid, did, serial):
for bus in usb.busses():
for dev in bus.devices:
if dev.idVendor == vid and dev.idProduct == pid:
if did is None or dev.filename == did:
if serial is None:
loginf('found transceiver at bus=%s device=%s' %
(bus.dirname, dev.filename))
return dev
else:
handle = dev.open()
try:
buf = self.readCfg(handle, 0x1F9, 7)
sn = str("%02d" % (buf[0]))
sn += str("%02d" % (buf[1]))
sn += str("%02d" % (buf[2]))
sn += str("%02d" % (buf[3]))
sn += str("%02d" % (buf[4]))
sn += str("%02d" % (buf[5]))
sn += str("%02d" % (buf[6]))
if str(serial) == sn:
loginf('found transceiver at bus=%s device=%s serial=%s' % (bus.dirname, dev.filename, sn))
return dev
else:
loginf('skipping transceiver with serial %s (looking for %s)' % (sn, serial))
finally:
del handle
return None
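    # Note: the transceiver serial number is stored as seven bytes in config
    # flash at address 0x1F9; _find_device reads them via
    # readCfg(handle, 0x1F9, 7) and renders each byte as two decimal digits.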
def _open_device(self, dev, interface=0):
self.devh = dev.open()
if not self.devh:
raise weewx.WeeWxIOError('Open USB device failed')
loginf('manufacturer: %s' % self.devh.getString(dev.iManufacturer,30))
loginf('product: %s' % self.devh.getString(dev.iProduct,30))
loginf('interface: %d' % interface)
# be sure kernel does not claim the interface
try:
self.devh.detachKernelDriver(interface)
except Exception:
pass
# attempt to claim the interface
try:
logdbg('claiming USB interface %d' % interface)
self.devh.claimInterface(interface)
self.devh.setAltInterface(interface)
except usb.USBError, e:
self._close_device()
logcrt('Unable to claim USB interface %s: %s' % (interface, e))
raise weewx.WeeWxIOError(e)
# FIXME: this seems to be specific to ws28xx?
# FIXME: check return values
usbWait = 0.05
self.devh.getDescriptor(0x1, 0, 0x12)
time.sleep(usbWait)
self.devh.getDescriptor(0x2, 0, 0x9)
time.sleep(usbWait)
self.devh.getDescriptor(0x2, 0, 0x22)
time.sleep(usbWait)
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
0xa, [], 0x0, 0x0, 1000)
time.sleep(usbWait)
self.devh.getDescriptor(0x22, 0, 0x2a9)
time.sleep(usbWait)
def _close_device(self):
try:
logdbg('releasing USB interface')
self.devh.releaseInterface()
except Exception:
pass
self.devh = None
def setTX(self):
buf = [0]*0x15
buf[0] = 0xD1
if DEBUG_COMM > 1:
self.dump('setTX', buf, fmt=DEBUG_DUMP_FORMAT)
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003d1,
index=0x0000000,
timeout=self.timeout)
def setRX(self):
buf = [0]*0x15
buf[0] = 0xD0
if DEBUG_COMM > 1:
self.dump('setRX', buf, fmt=DEBUG_DUMP_FORMAT)
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003d0,
index=0x0000000,
timeout=self.timeout)
def getState(self,StateBuffer):
buf = self.devh.controlMsg(requestType=usb.TYPE_CLASS |
usb.RECIP_INTERFACE | usb.ENDPOINT_IN,
request=usb.REQ_CLEAR_FEATURE,
buffer=0x0a,
value=0x00003de,
index=0x0000000,
timeout=self.timeout)
if DEBUG_COMM > 1:
self.dump('getState', buf, fmt=DEBUG_DUMP_FORMAT)
StateBuffer[0]=[0]*0x2
StateBuffer[0][0]=buf[1]
StateBuffer[0][1]=buf[2]
def readConfigFlash(self, addr, numBytes, data):
if numBytes > 512:
raise Exception('bad number of bytes')
        new_data = [0]*0x15 # pre-initialize so a zero-byte request cannot leave it undefined
        while numBytes:
buf=[0xcc]*0x0f #0x15
buf[0] = 0xdd
buf[1] = 0x0a
buf[2] = (addr >>8) & 0xFF
buf[3] = (addr >>0) & 0xFF
if DEBUG_COMM > 1:
self.dump('readCfgFlash>', buf, fmt=DEBUG_DUMP_FORMAT)
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003dd,
index=0x0000000,
timeout=self.timeout)
buf = self.devh.controlMsg(requestType=usb.TYPE_CLASS |
usb.RECIP_INTERFACE |
usb.ENDPOINT_IN,
request=usb.REQ_CLEAR_FEATURE,
buffer=0x15,
value=0x00003dc,
index=0x0000000,
timeout=self.timeout)
new_data=[0]*0x15
if numBytes < 16:
for i in xrange(0, numBytes):
new_data[i] = buf[i+4]
numBytes = 0
else:
for i in xrange(0, 16):
new_data[i] = buf[i+4]
numBytes -= 16
addr += 16
if DEBUG_COMM > 1:
self.dump('readCfgFlash<', buf, fmt=DEBUG_DUMP_FORMAT)
        data[0] = new_data # last chunk read; callers in this driver request at most 16 bytes
def setState(self,state):
buf = [0]*0x15
buf[0] = 0xd7
buf[1] = state
if DEBUG_COMM > 1:
self.dump('setState', buf, fmt=DEBUG_DUMP_FORMAT)
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003d7,
index=0x0000000,
timeout=self.timeout)
def setFrame(self,data,numBytes):
buf = [0]*0x111
buf[0] = 0xd5
buf[1] = numBytes >> 8
buf[2] = numBytes
for i in xrange(0, numBytes):
buf[i+3] = data[i]
if DEBUG_COMM == 1:
self.dump('setFrame', buf, 'short')
elif DEBUG_COMM > 1:
self.dump('setFrame', buf, fmt=DEBUG_DUMP_FORMAT)
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003d5,
index=0x0000000,
timeout=self.timeout)
def getFrame(self,data,numBytes):
buf = self.devh.controlMsg(requestType=usb.TYPE_CLASS |
usb.RECIP_INTERFACE |
usb.ENDPOINT_IN,
request=usb.REQ_CLEAR_FEATURE,
buffer=0x111,
value=0x00003d6,
index=0x0000000,
timeout=self.timeout)
new_data=[0]*0x131
new_numBytes=(buf[1] << 8 | buf[2])& 0x1ff
for i in xrange(0, new_numBytes):
new_data[i] = buf[i+3]
if DEBUG_COMM == 1:
self.dump('getFrame', buf, 'short')
elif DEBUG_COMM > 1:
self.dump('getFrame', buf, fmt=DEBUG_DUMP_FORMAT)
data[0] = new_data
numBytes[0] = new_numBytes
def writeReg(self,regAddr,data):
buf = [0]*0x05
buf[0] = 0xf0
buf[1] = regAddr & 0x7F
buf[2] = 0x01
buf[3] = data
buf[4] = 0x00
if DEBUG_COMM > 1:
self.dump('writeReg', buf, fmt=DEBUG_DUMP_FORMAT)
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003f0,
index=0x0000000,
timeout=self.timeout)
def execute(self, command):
buf = [0]*0x0f #*0x15
buf[0] = 0xd9
buf[1] = command
if DEBUG_COMM > 1:
self.dump('execute', buf, fmt=DEBUG_DUMP_FORMAT)
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003d9,
index=0x0000000,
timeout=self.timeout)
def setPreamblePattern(self,pattern):
buf = [0]*0x15
buf[0] = 0xd8
buf[1] = pattern
if DEBUG_COMM > 1:
self.dump('setPreamble', buf, fmt=DEBUG_DUMP_FORMAT)
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003d8,
index=0x0000000,
timeout=self.timeout)
# three formats, long, short, auto. short shows only the first 16 bytes.
# long shows the full length of the buffer. auto shows the message length
# as indicated by the length in the message itself for setFrame and
# getFrame, or the first 16 bytes for any other message.
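    # Example: a setFrame buffer starting d5 00 09 ... dumped with fmt='auto'
    # logs buf[2] + 3 = 12 bytes; buffers with any other leading byte fall
    # back to the 16-byte short format.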
def dump(self, cmd, buf, fmt='auto'):
strbuf = ''
msglen = None
if fmt == 'auto':
if buf[0] in [0xd5, 0x00]:
msglen = buf[2] + 3 # use msg length for set/get frame
else:
msglen = 16 # otherwise do same as short format
elif fmt == 'short':
msglen = 16
for i,x in enumerate(buf):
strbuf += str('%02x ' % x)
if (i+1) % 16 == 0:
self.dumpstr(cmd, strbuf)
strbuf = ''
if msglen is not None and i+1 >= msglen:
break
if strbuf:
self.dumpstr(cmd, strbuf)
# filter output that we do not care about, pad the command string.
def dumpstr(self, cmd, strbuf):
pad = ' ' * (15-len(cmd))
# de15 is idle, de14 is intermediate
if strbuf in ['de 15 00 00 00 00 ','de 14 00 00 00 00 ']:
if strbuf != self.last_dump or DEBUG_COMM > 2:
logdbg('%s: %s%s' % (cmd, pad, strbuf))
self.last_dump = strbuf
else:
logdbg('%s: %s%s' % (cmd, pad, strbuf))
self.last_dump = None
def readCfg(self, handle, addr, numBytes):
        new_data = [0]*0x15 # pre-initialize so a zero-byte request cannot leave it undefined
        while numBytes:
buf=[0xcc]*0x0f #0x15
buf[0] = 0xdd
buf[1] = 0x0a
buf[2] = (addr >>8) & 0xFF
buf[3] = (addr >>0) & 0xFF
handle.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003dd,
index=0x0000000,
timeout=1000)
buf = handle.controlMsg(requestType=usb.TYPE_CLASS |
usb.RECIP_INTERFACE | usb.ENDPOINT_IN,
request=usb.REQ_CLEAR_FEATURE,
buffer=0x15,
value=0x00003dc,
index=0x0000000,
timeout=1000)
new_data=[0]*0x15
if numBytes < 16:
for i in xrange(0, numBytes):
new_data[i] = buf[i+4]
numBytes = 0
else:
for i in xrange(0, 16):
new_data[i] = buf[i+4]
numBytes -= 16
addr += 16
return new_data
class CCommunicationService(object):
reg_names = dict()
class AX5051RegisterNames:
REVISION = 0x0
SCRATCH = 0x1
POWERMODE = 0x2
XTALOSC = 0x3
FIFOCTRL = 0x4
FIFODATA = 0x5
IRQMASK = 0x6
IFMODE = 0x8
PINCFG1 = 0x0C
PINCFG2 = 0x0D
MODULATION = 0x10
ENCODING = 0x11
FRAMING = 0x12
CRCINIT3 = 0x14
CRCINIT2 = 0x15
CRCINIT1 = 0x16
CRCINIT0 = 0x17
FREQ3 = 0x20
FREQ2 = 0x21
FREQ1 = 0x22
FREQ0 = 0x23
FSKDEV2 = 0x25
FSKDEV1 = 0x26
FSKDEV0 = 0x27
IFFREQHI = 0x28
IFFREQLO = 0x29
PLLLOOP = 0x2C
PLLRANGING = 0x2D
PLLRNGCLK = 0x2E
TXPWR = 0x30
TXRATEHI = 0x31
TXRATEMID = 0x32
TXRATELO = 0x33
MODMISC = 0x34
FIFOCONTROL2 = 0x37
ADCMISC = 0x38
AGCTARGET = 0x39
AGCATTACK = 0x3A
AGCDECAY = 0x3B
AGCCOUNTER = 0x3C
CICDEC = 0x3F
DATARATEHI = 0x40
DATARATELO = 0x41
TMGGAINHI = 0x42
TMGGAINLO = 0x43
PHASEGAIN = 0x44
FREQGAIN = 0x45
FREQGAIN2 = 0x46
AMPLGAIN = 0x47
TRKFREQHI = 0x4C
TRKFREQLO = 0x4D
XTALCAP = 0x4F
SPAREOUT = 0x60
TESTOBS = 0x68
APEOVER = 0x70
TMMUX = 0x71
PLLVCOI = 0x72
PLLCPEN = 0x73
PLLRNGMISC = 0x74
AGCMANUAL = 0x78
ADCDCLEVEL = 0x79
RFMISC = 0x7A
TXDRIVER = 0x7B
REF = 0x7C
RXMISC = 0x7D
def __init__(self):
logdbg('CCommunicationService.init')
self.shid = sHID()
self.DataStore = CDataStore()
self.firstSleep = 1
self.nextSleep = 1
self.pollCount = 0
self.running = False
self.child = None
self.thread_wait = 60.0 # seconds
self.command = None
self.history_cache = HistoryCache()
# do not set time when offset to whole hour is <= _a3_offset
self._a3_offset = 3
def buildFirstConfigFrame(self, Buffer, cs):
logdbg('buildFirstConfigFrame: cs=%04x' % cs)
newBuffer = [0]
newBuffer[0] = [0]*9
comInt = self.DataStore.getCommModeInterval()
historyAddress = 0xFFFFFF
newBuffer[0][0] = 0xf0
newBuffer[0][1] = 0xf0
newBuffer[0][2] = EAction.aGetConfig
newBuffer[0][3] = (cs >> 8) & 0xff
newBuffer[0][4] = (cs >> 0) & 0xff
newBuffer[0][5] = (comInt >> 4) & 0xff
newBuffer[0][6] = (historyAddress >> 16) & 0x0f | 16 * (comInt & 0xf)
newBuffer[0][7] = (historyAddress >> 8 ) & 0xff
newBuffer[0][8] = (historyAddress >> 0 ) & 0xff
Buffer[0] = newBuffer[0]
Length = 0x09
return Length
def buildConfigFrame(self, Buffer):
logdbg("buildConfigFrame")
newBuffer = [0]
newBuffer[0] = [0]*48
cfgBuffer = [0]
cfgBuffer[0] = [0]*44
changed = self.DataStore.StationConfig.testConfigChanged(cfgBuffer)
if changed:
self.shid.dump('OutBuf', cfgBuffer[0], fmt='long')
newBuffer[0][0] = Buffer[0][0]
newBuffer[0][1] = Buffer[0][1]
            newBuffer[0][2] = EAction.aSendConfig # 0x40; change this value if the station should not store the config
newBuffer[0][3] = Buffer[0][3]
for i in xrange(0,44):
newBuffer[0][i+4] = cfgBuffer[0][i]
Buffer[0] = newBuffer[0]
Length = 48 # 0x30
        else: # nothing to write: checksums match and no reset flags pending
Length = 0
return Length
def buildTimeFrame(self, Buffer, cs):
logdbg("buildTimeFrame: cs=%04x" % cs)
now = time.time()
tm = time.localtime(now)
newBuffer=[0]
newBuffer[0]=Buffer[0]
#00000000: d5 00 0c 00 32 c0 00 8f 45 25 15 91 31 20 01 00
#00000000: d5 00 0c 00 32 c0 06 c1 47 25 15 91 31 20 01 00
# 3 4 5 6 7 8 9 10 11
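        # Decoding the second sample above (BCD, low-order digits first):
        # 47=sec 25=min 15=hour, 91=day digit 9 + DoW 1, 31=month digit 3 +
        # day digit 1, 20=year digit 2 + month digit 0, 01=year digit 1,
        # i.e. 2012-03-19 15:25:47 with a checksum of 06c1.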
newBuffer[0][2] = EAction.aSendTime # 0xc0
newBuffer[0][3] = (cs >> 8) & 0xFF
newBuffer[0][4] = (cs >> 0) & 0xFF
newBuffer[0][5] = (tm[5] % 10) + 0x10 * (tm[5] // 10) #sec
newBuffer[0][6] = (tm[4] % 10) + 0x10 * (tm[4] // 10) #min
newBuffer[0][7] = (tm[3] % 10) + 0x10 * (tm[3] // 10) #hour
        #DayOfWeek = tm[6] - 1  # OLE weekday: 1-7, 1=Sunday
        DayOfWeek = tm[6]  # Python weekday: 0-6, 0=Monday
newBuffer[0][8] = DayOfWeek % 10 + 0x10 * (tm[2] % 10) #DoW + Day
newBuffer[0][9] = (tm[2] // 10) + 0x10 * (tm[1] % 10) #day + month
newBuffer[0][10] = (tm[1] // 10) + 0x10 * ((tm[0] - 2000) % 10) #month + year
newBuffer[0][11] = (tm[0] - 2000) // 10 #year
Buffer[0]=newBuffer[0]
Length = 0x0c
return Length
def buildACKFrame(self, Buffer, action, cs, hidx=None):
if DEBUG_COMM > 1:
logdbg("buildACKFrame: action=%x cs=%04x historyIndex=%s" %
(action, cs, hidx))
newBuffer = [0]
newBuffer[0] = [0]*9
for i in xrange(0,2):
newBuffer[0][i] = Buffer[0][i]
comInt = self.DataStore.getCommModeInterval()
# When last weather is stale, change action to get current weather
# This is only needed during long periods of history data catchup
if self.command == EAction.aGetHistory:
now = int(time.time())
age = now - self.DataStore.LastStat.last_weather_ts
            # Morph the action only for GetHistory requests whose weather
            # data has gone stale (older than twice the CommModeInterval),
            # and never for initial GetHistory requests (device ID byte 0xF0)
if action == EAction.aGetHistory and age >= (comInt +1) * 2 and newBuffer[0][1] != 0xF0:
if DEBUG_COMM > 0:
logdbg('buildACKFrame: morphing action from %d to 5 (age=%s)' % (action, age))
action = EAction.aGetCurrent
if hidx is None:
if self.command == EAction.aGetHistory:
hidx = self.history_cache.next_index
elif self.DataStore.getLastHistoryIndex() is not None:
hidx = self.DataStore.getLastHistoryIndex()
if hidx is None or hidx < 0 or hidx >= WS28xxDriver.max_records:
haddr = 0xffffff
else:
haddr = index_to_addr(hidx)
if DEBUG_COMM > 1:
logdbg('buildACKFrame: idx: %s addr: 0x%04x' % (hidx, haddr))
newBuffer[0][2] = action & 0xF
newBuffer[0][3] = (cs >> 8) & 0xFF
newBuffer[0][4] = (cs >> 0) & 0xFF
newBuffer[0][5] = (comInt >> 4) & 0xFF
newBuffer[0][6] = (haddr >> 16) & 0x0F | 16 * (comInt & 0xF)
newBuffer[0][7] = (haddr >> 8 ) & 0xFF
newBuffer[0][8] = (haddr >> 0 ) & 0xFF
#d5 00 09 f0 f0 03 00 32 00 3f ff ff
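        # Decoding the sample: d5 00 09 is the setFrame header, f0 f0 the
        # (unpaired) device ID, 03 the action, 00 32 the checksum, and
        # 00 3f ff ff packs comInt=3 with the all-ones 'no history address'
        # marker.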
Buffer[0]=newBuffer[0]
return 9
def handleWsAck(self,Buffer,Length):
logdbg('handleWsAck')
self.DataStore.setLastStatCache(seen_ts=int(time.time()),
quality=(Buffer[0][3] & 0x7f),
battery=(Buffer[0][2] & 0xf))
def handleConfig(self,Buffer,Length):
logdbg('handleConfig: %s' % self.timing())
if DEBUG_CONFIG_DATA > 2:
self.shid.dump('InBuf', Buffer[0], fmt='long')
newBuffer=[0]
newBuffer[0] = Buffer[0]
newLength = [0]
now = int(time.time())
self.DataStore.StationConfig.read(newBuffer)
if DEBUG_CONFIG_DATA > 1:
self.DataStore.StationConfig.toLog()
self.DataStore.setLastStatCache(seen_ts=now,
quality=(Buffer[0][3] & 0x7f),
battery=(Buffer[0][2] & 0xf),
config_ts=now)
cs = newBuffer[0][47] | (newBuffer[0][46] << 8)
self.setSleep(0.300,0.010)
newLength[0] = self.buildACKFrame(newBuffer, EAction.aGetHistory, cs)
Buffer[0] = newBuffer[0]
Length[0] = newLength[0]
def handleCurrentData(self,Buffer,Length):
if DEBUG_WEATHER_DATA > 0:
logdbg('handleCurrentData: %s' % self.timing())
now = int(time.time())
# update the weather data cache if changed or stale
chksum = CCurrentWeatherData.calcChecksum(Buffer)
age = now - self.DataStore.LastStat.last_weather_ts
if age >= 10 or chksum != self.DataStore.CurrentWeather.checksum():
if DEBUG_WEATHER_DATA > 2:
self.shid.dump('CurWea', Buffer[0], fmt='long')
data = CCurrentWeatherData()
data.read(Buffer)
self.DataStore.setCurrentWeather(data)
if DEBUG_WEATHER_DATA > 1:
data.toLog()
# update the connection cache
self.DataStore.setLastStatCache(seen_ts=now,
quality=(Buffer[0][3] & 0x7f),
battery=(Buffer[0][2] & 0xf),
weather_ts=now)
newBuffer = [0]
newBuffer[0] = Buffer[0]
newLength = [0]
cs = newBuffer[0][5] | (newBuffer[0][4] << 8)
cfgBuffer = [0]
cfgBuffer[0] = [0]*44
changed = self.DataStore.StationConfig.testConfigChanged(cfgBuffer)
inBufCS = self.DataStore.StationConfig.getInBufCS()
if inBufCS == 0 or inBufCS != cs:
# request for a get config
logdbg('handleCurrentData: inBufCS of station does not match')
self.setSleep(0.300,0.010)
newLength[0] = self.buildACKFrame(newBuffer, EAction.aGetConfig, cs)
elif changed:
# Request for a set config
logdbg('handleCurrentData: outBufCS of station changed')
self.setSleep(0.300,0.010)
newLength[0] = self.buildACKFrame(newBuffer, EAction.aReqSetConfig, cs)
else:
            # Request either a history message or a current weather message.
            # In general we do not use EAction.aGetCurrent to ask for current
            # weather: current weather messages also arrive in response to
            # EAction.aGetHistory, as observed in the Heavy Weather Pro
            # traffic (via USB sniffer).
self.setSleep(0.300,0.010)
newLength[0] = self.buildACKFrame(newBuffer, EAction.aGetHistory, cs)
Length[0] = newLength[0]
Buffer[0] = newBuffer[0]
def handleHistoryData(self, buf, buflen):
if DEBUG_HISTORY_DATA > 0:
logdbg('handleHistoryData: %s' % self.timing())
now = int(time.time())
self.DataStore.setLastStatCache(seen_ts=now,
quality=(buf[0][3] & 0x7f),
battery=(buf[0][2] & 0xf),
history_ts=now)
newbuf = [0]
newbuf[0] = buf[0]
newlen = [0]
data = CHistoryData()
data.read(newbuf)
if DEBUG_HISTORY_DATA > 1:
data.toLog()
cs = newbuf[0][5] | (newbuf[0][4] << 8)
latestAddr = bytes_to_addr(buf[0][6], buf[0][7], buf[0][8])
thisAddr = bytes_to_addr(buf[0][9], buf[0][10], buf[0][11])
latestIndex = addr_to_index(latestAddr)
thisIndex = addr_to_index(thisAddr)
ts = tstr_to_ts(str(data.Time))
nrec = get_index(latestIndex - thisIndex)
logdbg('handleHistoryData: time=%s'
' this=%d (0x%04x) latest=%d (0x%04x) nrec=%d' %
(data.Time, thisIndex, thisAddr, latestIndex, latestAddr, nrec))
# track the latest history index
self.DataStore.setLastHistoryIndex(thisIndex)
self.DataStore.setLatestHistoryIndex(latestIndex)
nextIndex = None
if self.command == EAction.aGetHistory:
if self.history_cache.start_index is None:
nreq = 0
if self.history_cache.num_rec > 0:
loginf('handleHistoryData: request for %s records' %
self.history_cache.num_rec)
nreq = self.history_cache.num_rec
else:
loginf('handleHistoryData: request records since %s' %
weeutil.weeutil.timestamp_to_string(self.history_cache.since_ts))
span = int(time.time()) - self.history_cache.since_ts
# FIXME: what if we do not have config data yet?
cfg = self.getConfigData().asDict()
arcint = 60 * getHistoryInterval(cfg['history_interval'])
# FIXME: this assumes a constant archive interval for all
# records in the station history
nreq = int(span / arcint) + 5 # FIXME: punt 5
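                    # Worked example (assuming a 5-minute archive interval,
                    # arcint = 300s): catching up one day of history gives
                    # nreq = 86400/300 + 5 = 293 records.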
if nreq > nrec:
loginf('handleHistoryData: too many records requested (%d)'
', clipping to number stored (%d)' % (nreq, nrec))
nreq = nrec
idx = get_index(latestIndex - nreq)
self.history_cache.start_index = idx
self.history_cache.next_index = idx
self.DataStore.setLastHistoryIndex(idx)
self.history_cache.num_outstanding_records = nreq
logdbg('handleHistoryData: start_index=%s'
' num_outstanding_records=%s' % (idx, nreq))
nextIndex = idx
elif self.history_cache.next_index is not None:
# thisIndex should be the next record after next_index
thisIndexTst = get_next_index(self.history_cache.next_index)
if thisIndexTst == thisIndex:
self.history_cache.num_scanned += 1
# get the next history record
if ts is not None and self.history_cache.since_ts <= ts:
# Check if two records in a row with the same ts
if self.history_cache.last_ts == ts:
logdbg('handleHistoryData: remove previous record'
' with duplicate timestamp: %s' %
weeutil.weeutil.timestamp_to_string(ts))
self.history_cache.records.pop()
self.history_cache.last_ts = ts
# append to the history
logdbg('handleHistoryData: appending history record'
' %s: %s' % (thisIndex, data.asDict()))
self.history_cache.records.append(data.asDict())
self.history_cache.num_outstanding_records = nrec
elif ts is None:
logerr('handleHistoryData: skip record: this_ts=None')
else:
logdbg('handleHistoryData: skip record: since_ts=%s this_ts=%s' % (weeutil.weeutil.timestamp_to_string(self.history_cache.since_ts), weeutil.weeutil.timestamp_to_string(ts)))
self.history_cache.next_index = thisIndex
else:
loginf('handleHistoryData: index mismatch: %s != %s' %
(thisIndexTst, thisIndex))
nextIndex = self.history_cache.next_index
logdbg('handleHistoryData: next=%s' % nextIndex)
self.setSleep(0.300,0.010)
newlen[0] = self.buildACKFrame(newbuf, EAction.aGetHistory, cs, nextIndex)
buflen[0] = newlen[0]
buf[0] = newbuf[0]
def handleNextAction(self,Buffer,Length):
newBuffer = [0]
newBuffer[0] = Buffer[0]
newLength = [0]
newLength[0] = Length[0]
self.DataStore.setLastStatCache(seen_ts=int(time.time()),
quality=(Buffer[0][3] & 0x7f))
cs = newBuffer[0][5] | (newBuffer[0][4] << 8)
if (Buffer[0][2] & 0xEF) == EResponseType.rtReqFirstConfig:
logdbg('handleNextAction: a1 (first-time config)')
self.setSleep(0.085,0.005)
newLength[0] = self.buildFirstConfigFrame(newBuffer, cs)
elif (Buffer[0][2] & 0xEF) == EResponseType.rtReqSetConfig:
logdbg('handleNextAction: a2 (set config data)')
self.setSleep(0.085,0.005)
newLength[0] = self.buildConfigFrame(newBuffer)
elif (Buffer[0][2] & 0xEF) == EResponseType.rtReqSetTime:
logdbg('handleNextAction: a3 (set time data)')
now = int(time.time())
age = now - self.DataStore.LastStat.last_weather_ts
if age >= (self.DataStore.getCommModeInterval() +1) * 2:
# always set time if init or stale communication
self.setSleep(0.085,0.005)
newLength[0] = self.buildTimeFrame(newBuffer, cs)
else:
                # When the time is set on the whole hour we may get an extra
                # history record with a timestamp one history period ahead.
                # Skip settime when the offset to the whole hour is too small
                # (time difference between station and server < self._a3_offset)
m, s = divmod(now, 60)
h, m = divmod(m, 60)
                logdbg('Time: mm:ss = %02d:%02d' % (m, s))
if (m == 59 and s >= (60 - self._a3_offset)) or (m == 0 and s <= self._a3_offset):
logdbg('Skip settime; time difference <= %s s' % int(self._a3_offset))
self.setSleep(0.300,0.010)
newLength[0] = self.buildACKFrame(newBuffer, EAction.aGetHistory, cs)
else:
# set time
self.setSleep(0.085,0.005)
newLength[0] = self.buildTimeFrame(newBuffer, cs)
else:
logdbg('handleNextAction: %02x' % (Buffer[0][2] & 0xEF))
self.setSleep(0.300,0.010)
newLength[0] = self.buildACKFrame(newBuffer, EAction.aGetHistory, cs)
Length[0] = newLength[0]
Buffer[0] = newBuffer[0]
def generateResponse(self, Buffer, Length):
if DEBUG_COMM > 1:
logdbg('generateResponse: %s' % self.timing())
newBuffer = [0]
newBuffer[0] = Buffer[0]
newLength = [0]
newLength[0] = Length[0]
if Length[0] == 0:
raise BadResponse('zero length buffer')
bufferID = (Buffer[0][0] <<8) | Buffer[0][1]
respType = (Buffer[0][2] & 0xE0)
if DEBUG_COMM > 1:
logdbg("generateResponse: id=%04x resp=%x length=%x" %
(bufferID, respType, Length[0]))
deviceID = self.DataStore.getDeviceID()
if bufferID != 0xF0F0:
self.DataStore.setRegisteredDeviceID(bufferID)
if bufferID == 0xF0F0:
loginf('generateResponse: console not paired, attempting to pair to 0x%04x' % deviceID)
newLength[0] = self.buildACKFrame(newBuffer, EAction.aGetConfig, deviceID, 0xFFFF)
elif bufferID == deviceID:
if respType == EResponseType.rtDataWritten:
# 00000000: 00 00 06 00 32 20
if Length[0] == 0x06:
self.DataStore.StationConfig.setResetMinMaxFlags(0)
self.shid.setRX()
raise DataWritten()
else:
raise BadResponse('len=%x resp=%x' % (Length[0], respType))
elif respType == EResponseType.rtGetConfig:
# 00000000: 00 00 30 00 32 40
if Length[0] == 0x30:
self.handleConfig(newBuffer, newLength)
else:
raise BadResponse('len=%x resp=%x' % (Length[0], respType))
elif respType == EResponseType.rtGetCurrentWeather:
# 00000000: 00 00 d7 00 32 60
if Length[0] == 0xd7: #215
self.handleCurrentData(newBuffer, newLength)
else:
raise BadResponse('len=%x resp=%x' % (Length[0], respType))
elif respType == EResponseType.rtGetHistory:
# 00000000: 00 00 1e 00 32 80
if Length[0] == 0x1e:
self.handleHistoryData(newBuffer, newLength)
else:
raise BadResponse('len=%x resp=%x' % (Length[0], respType))
elif respType == EResponseType.rtRequest:
# 00000000: 00 00 06 f0 f0 a1
# 00000000: 00 00 06 00 32 a3
# 00000000: 00 00 06 00 32 a2
if Length[0] == 0x06:
self.handleNextAction(newBuffer, newLength)
else:
raise BadResponse('len=%x resp=%x' % (Length[0], respType))
else:
raise BadResponse('unexpected response type %x' % respType)
        elif respType not in [0x20,0x40,0x60,0x80,0xa0]: # respType is masked with 0xE0 above
# message is probably corrupt
raise BadResponse('unknown response type %x' % respType)
else:
msg = 'message from console contains unknown device ID (id=%04x resp=%x)' % (bufferID, respType)
logdbg(msg)
log_frame(Length[0],Buffer[0])
raise BadResponse(msg)
Buffer[0] = newBuffer[0]
Length[0] = newLength[0]
def configureRegisterNames(self):
self.reg_names[self.AX5051RegisterNames.IFMODE] =0x00
self.reg_names[self.AX5051RegisterNames.MODULATION]=0x41 #fsk
self.reg_names[self.AX5051RegisterNames.ENCODING] =0x07
self.reg_names[self.AX5051RegisterNames.FRAMING] =0x84 #1000:0100 ##?hdlc? |1000 010 0
self.reg_names[self.AX5051RegisterNames.CRCINIT3] =0xff
self.reg_names[self.AX5051RegisterNames.CRCINIT2] =0xff
self.reg_names[self.AX5051RegisterNames.CRCINIT1] =0xff
self.reg_names[self.AX5051RegisterNames.CRCINIT0] =0xff
self.reg_names[self.AX5051RegisterNames.FREQ3] =0x38
self.reg_names[self.AX5051RegisterNames.FREQ2] =0x90
self.reg_names[self.AX5051RegisterNames.FREQ1] =0x00
self.reg_names[self.AX5051RegisterNames.FREQ0] =0x01
self.reg_names[self.AX5051RegisterNames.PLLLOOP] =0x1d
self.reg_names[self.AX5051RegisterNames.PLLRANGING]=0x08
self.reg_names[self.AX5051RegisterNames.PLLRNGCLK] =0x03
self.reg_names[self.AX5051RegisterNames.MODMISC] =0x03
self.reg_names[self.AX5051RegisterNames.SPAREOUT] =0x00
self.reg_names[self.AX5051RegisterNames.TESTOBS] =0x00
self.reg_names[self.AX5051RegisterNames.APEOVER] =0x00
self.reg_names[self.AX5051RegisterNames.TMMUX] =0x00
self.reg_names[self.AX5051RegisterNames.PLLVCOI] =0x01
self.reg_names[self.AX5051RegisterNames.PLLCPEN] =0x01
self.reg_names[self.AX5051RegisterNames.RFMISC] =0xb0
self.reg_names[self.AX5051RegisterNames.REF] =0x23
self.reg_names[self.AX5051RegisterNames.IFFREQHI] =0x20
self.reg_names[self.AX5051RegisterNames.IFFREQLO] =0x00
self.reg_names[self.AX5051RegisterNames.ADCMISC] =0x01
self.reg_names[self.AX5051RegisterNames.AGCTARGET] =0x0e
self.reg_names[self.AX5051RegisterNames.AGCATTACK] =0x11
self.reg_names[self.AX5051RegisterNames.AGCDECAY] =0x0e
self.reg_names[self.AX5051RegisterNames.CICDEC] =0x3f
self.reg_names[self.AX5051RegisterNames.DATARATEHI]=0x19
self.reg_names[self.AX5051RegisterNames.DATARATELO]=0x66
self.reg_names[self.AX5051RegisterNames.TMGGAINHI] =0x01
self.reg_names[self.AX5051RegisterNames.TMGGAINLO] =0x96
self.reg_names[self.AX5051RegisterNames.PHASEGAIN] =0x03
self.reg_names[self.AX5051RegisterNames.FREQGAIN] =0x04
self.reg_names[self.AX5051RegisterNames.FREQGAIN2] =0x0a
self.reg_names[self.AX5051RegisterNames.AMPLGAIN] =0x06
self.reg_names[self.AX5051RegisterNames.AGCMANUAL] =0x00
self.reg_names[self.AX5051RegisterNames.ADCDCLEVEL]=0x10
self.reg_names[self.AX5051RegisterNames.RXMISC] =0x35
self.reg_names[self.AX5051RegisterNames.FSKDEV2] =0x00
self.reg_names[self.AX5051RegisterNames.FSKDEV1] =0x31
self.reg_names[self.AX5051RegisterNames.FSKDEV0] =0x27
self.reg_names[self.AX5051RegisterNames.TXPWR] =0x03
self.reg_names[self.AX5051RegisterNames.TXRATEHI] =0x00
self.reg_names[self.AX5051RegisterNames.TXRATEMID] =0x51
self.reg_names[self.AX5051RegisterNames.TXRATELO] =0xec
self.reg_names[self.AX5051RegisterNames.TXDRIVER] =0x88
def initTransceiver(self, frequency_standard):
logdbg('initTransceiver: frequency_standard=%s' % frequency_standard)
self.DataStore.setFrequencyStandard(frequency_standard)
self.configureRegisterNames()
# calculate the frequency then set frequency registers
freq = self.DataStore.TransceiverSettings.Frequency
loginf('base frequency: %d' % freq)
freqVal = long(freq / 16000000.0 * 16777216.0)
corVec = [None]
self.shid.readConfigFlash(0x1F5, 4, corVec)
corVal = corVec[0][0] << 8
corVal |= corVec[0][1]
corVal <<= 8
corVal |= corVec[0][2]
corVal <<= 8
corVal |= corVec[0][3]
loginf('frequency correction: %d (0x%x)' % (corVal,corVal))
freqVal += corVal
if not (freqVal % 2):
freqVal += 1
loginf('adjusted frequency: %d (0x%x)' % (freqVal,freqVal))
self.reg_names[self.AX5051RegisterNames.FREQ3] = (freqVal >>24) & 0xFF
self.reg_names[self.AX5051RegisterNames.FREQ2] = (freqVal >>16) & 0xFF
self.reg_names[self.AX5051RegisterNames.FREQ1] = (freqVal >>8) & 0xFF
self.reg_names[self.AX5051RegisterNames.FREQ0] = (freqVal >>0) & 0xFF
logdbg('frequency registers: %x %x %x %x' % (
self.reg_names[self.AX5051RegisterNames.FREQ3],
self.reg_names[self.AX5051RegisterNames.FREQ2],
self.reg_names[self.AX5051RegisterNames.FREQ1],
self.reg_names[self.AX5051RegisterNames.FREQ0]))
# figure out the transceiver id
buf = [None]
self.shid.readConfigFlash(0x1F9, 7, buf)
tid = buf[0][5] << 8
tid += buf[0][6]
loginf('transceiver identifier: %d (0x%04x)' % (tid,tid))
self.DataStore.setDeviceID(tid)
# figure out the transceiver serial number
sn = str("%02d"%(buf[0][0]))
sn += str("%02d"%(buf[0][1]))
sn += str("%02d"%(buf[0][2]))
sn += str("%02d"%(buf[0][3]))
sn += str("%02d"%(buf[0][4]))
sn += str("%02d"%(buf[0][5]))
sn += str("%02d"%(buf[0][6]))
loginf('transceiver serial: %s' % sn)
self.DataStore.setTransceiverSerNo(sn)
for r in self.reg_names:
self.shid.writeReg(r, self.reg_names[r])
def setup(self, frequency_standard,
vendor_id, product_id, device_id, serial,
comm_interval=3):
self.DataStore.setCommModeInterval(comm_interval)
self.shid.open(vendor_id, product_id, device_id, serial)
self.initTransceiver(frequency_standard)
self.DataStore.setTransceiverPresent(True)
def teardown(self):
self.shid.close()
# FIXME: make this thread-safe
def getWeatherData(self):
return self.DataStore.CurrentWeather
# FIXME: make this thread-safe
def getLastStat(self):
return self.DataStore.LastStat
# FIXME: make this thread-safe
def getConfigData(self):
return self.DataStore.StationConfig
def startCachingHistory(self, since_ts=0, num_rec=0):
self.history_cache.clear_records()
if since_ts is None:
since_ts = 0
self.history_cache.since_ts = since_ts
if num_rec > WS28xxDriver.max_records - 2:
num_rec = WS28xxDriver.max_records - 2
self.history_cache.num_rec = num_rec
self.command = EAction.aGetHistory
def stopCachingHistory(self):
self.command = None
def getUncachedHistoryCount(self):
return self.history_cache.num_outstanding_records
def getNextHistoryIndex(self):
return self.history_cache.next_index
def getNumHistoryScanned(self):
return self.history_cache.num_scanned
def getLatestHistoryIndex(self):
return self.DataStore.LastStat.LatestHistoryIndex
def getHistoryCacheRecords(self):
return self.history_cache.records
def clearHistoryCache(self):
self.history_cache.clear_records()
def startRFThread(self):
if self.child is not None:
return
logdbg('startRFThread: spawning RF thread')
self.running = True
self.child = threading.Thread(target=self.doRF)
self.child.setName('RFComm')
self.child.setDaemon(True)
self.child.start()
def stopRFThread(self):
self.running = False
logdbg('stopRFThread: waiting for RF thread to terminate')
self.child.join(self.thread_wait)
if self.child.isAlive():
logerr('unable to terminate RF thread after %d seconds' %
self.thread_wait)
else:
self.child = None
def isRunning(self):
return self.running
def doRF(self):
try:
logdbg('setting up rf communication')
self.doRFSetup()
logdbg('starting rf communication')
while self.running:
self.doRFCommunication()
except Exception, e:
logerr('exception in doRF: %s' % e)
if weewx.debug:
log_traceback(dst=syslog.LOG_DEBUG)
self.running = False
raise
finally:
logdbg('stopping rf communication')
# it is probably not necessary to have two setPreamblePattern invocations.
# however, HeavyWeatherPro seems to do it this way on a first time config.
# doing it this way makes configuration easier during a factory reset and
# when re-establishing communication with the station sensors.
def doRFSetup(self):
self.shid.execute(5)
self.shid.setPreamblePattern(0xaa)
self.shid.setState(0)
time.sleep(1)
self.shid.setRX()
self.shid.setPreamblePattern(0xaa)
self.shid.setState(0x1e)
time.sleep(1)
self.shid.setRX()
self.setSleep(0.085,0.005)
def doRFCommunication(self):
time.sleep(self.firstSleep)
self.pollCount = 0
while self.running:
StateBuffer = [None]
self.shid.getState(StateBuffer)
self.pollCount += 1
if StateBuffer[0][0] == 0x16:
break
time.sleep(self.nextSleep)
else:
return
DataLength = [0]
DataLength[0] = 0
FrameBuffer=[0]
FrameBuffer[0]=[0]*0x03
self.shid.getFrame(FrameBuffer, DataLength)
try:
self.generateResponse(FrameBuffer, DataLength)
self.shid.setFrame(FrameBuffer[0], DataLength[0])
except BadResponse, e:
logerr('generateResponse failed: %s' % e)
except DataWritten, e:
logdbg('SetTime/SetConfig data written')
self.shid.setTX()
# these are for diagnostics and debugging
def setSleep(self, firstsleep, nextsleep):
self.firstSleep = firstsleep
self.nextSleep = nextsleep
def timing(self):
s = self.firstSleep + self.nextSleep * (self.pollCount - 1)
return 'sleep=%s first=%s next=%s count=%s' % (
s, self.firstSleep, self.nextSleep, self.pollCount)
| sai9/weewx-gitsvn | bin/weewx/drivers/ws28xx.py | Python | gpl-3.0 | 174,398 |
#
# logutil.py
# A module containing means of interacting with log files.
#
import logging
import logging.handlers
import os
import time
from data_structures import enum
from config import get_config_value
LoggingSection = enum(
'CLIENT',
'CRAWLER',
'DATA',
'FRONTIER',
'TEST',
'UTILITIES',
)
#region Setup
logging.basicConfig(level=logging.INFO,
format='[%(asctime)s %(levelname)s] %(name)s::%(funcName)s - %(message)s',
datefmt='%x %X %Z')
module_dir = os.path.dirname(__file__)
logfile = os.path.join(module_dir, get_config_value('LOG', 'path'))
logdir = os.path.join(module_dir, get_config_value('LOG', 'dir'))
if not os.path.exists(logdir):
os.mkdir(logdir)
handler = logging.handlers.RotatingFileHandler(logfile,
maxBytes=8192,
backupCount=10, )
formatter = logging.Formatter('[%(asctime)s %(levelname)s] %(name)s::%(funcName)s - %(message)s')
formatter.datefmt = '%x %X %Z'
formatter.converter = time.gmtime
handler.setFormatter(formatter)
#endregion
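# Usage sketch (an illustrative addition, not part of the original module):
#     logger = get_logger(LoggingSection.CRAWLER, __name__)
#     logger.info('crawl started')
# get_logger (below) attaches the rotating file handler configured above and
# namespaces the logger as 'htresearch.<section>.<name>'.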
def get_logger(section, name):
"""
Fetches a logger.
Arguments:
section (string): The section the logger is attributed to.
name (string): The name of the logger.
Returns:
The logger corresponding to the section and name provided.
"""
section_name = LoggingSection.reverse_mapping[section].lower()
logger = logging.getLogger('htresearch.{0}.{1}'.format(section_name, name))
logger.addHandler(handler)
logger.setLevel(logging.INFO)
    return logger
| mizhgun/HTResearch | HTResearch/Utilities/logutil.py | Python | gpl-3.0 | 1,640 |
#!/usr/bin/env python
'''
Purpose:
This script, using default values, determines and plots the CpG islands in
relation to a given feature "type" (e.g. "gene" or "mRNA") from a GFF file
which corresponds to the user-provided fasta file.
Note:
CpG Islands are determined by ObEx = (Observed CpG) / (Expected CpG) ,
default threshold > 1.
Where Expected CpG = (count(C) * count(G)) / WindowSize
Usage:
python cpg_gene.py FastaFile Gff_File OutFile.png
Default optional parameters:
-s, Step Size, default = 50
-w, Window Size, default = 200
-oe, Minimum Observed Expected CpG, default = 1
-gc, Minimum GC, default = .5
-r Range from ATG, or provided feature, default = 5000
-f, GFF Feature, default = "gene"
-i, Gene ID from GFF, default = ""
'''
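# Worked example of the ObEx formula above (illustrative, with made-up counts):
# a 200 bp window containing 14 C's, 12 G's and 10 CpG dinucleotides gives
# Expected CpG = (14 * 12) / 200 = 0.84 and ObEx = 10 / 0.84 ~= 11.9 > 1,
# so that window would be flagged as a CpG island candidate.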
import sys
import os
import argparse
from collections import Counter
from Bio import SeqIO
import cpgmod
import gffutils
import pandas as pd
import numpy as np
from ggplot import *
# Capture command line args, with or without defaults
if __name__ == '__main__':
# Parse the arguments
LineArgs = cpgmod.parseArguments()
# Populate vars with args
FastaFile = LineArgs.FastaFile
GffFile = LineArgs.GffFile
OutFile = LineArgs.FileOut
Step = LineArgs.s
WinSize = LineArgs.w
ObExthresh = LineArgs.oe
GCthresh = LineArgs.gc
StartRange = LineArgs.r
FeatGFF = LineArgs.f
ID_Feat = LineArgs.i
# Gather all possible CpG islands
MergedRecs = []
print "Parsing sequences...\n"
for SeqRecord in SeqIO.parse(FastaFile, "fasta"):
print SeqRecord.id
# Determine if sequences and args are acceptable
cpgmod.arg_seqcheck(SeqRecord, WinSize, Step)
# Pre-determine number of islands
NumOfChunks = cpgmod.chunks(SeqRecord, WinSize, Step)
# Return array of SeqRec class (potential CpG island) instances
SeqRecList = cpgmod.compute(SeqRecord, Step, NumOfChunks, WinSize)
MergedRecs = MergedRecs + SeqRecList
# Create GFF DB
GffDb = gffutils.create_db(GffFile, dbfn='GFF.db', force=True, keep_order=True,
merge_strategy='merge', sort_attribute_values=True,
disable_infer_transcripts=True,
disable_infer_genes=True)
print "\nGFF Database Created...\n"
# Filter out SeqRec below threshold
DistArr = []
for Rec in MergedRecs:
Cond1 = Rec.expect() > 0
if Cond1 == True:
ObEx = (Rec.observ() / Rec.expect())
Cond2 = ObEx > ObExthresh
Cond3 = Rec.gc_cont() > GCthresh
if Cond2 and Cond3:
# Query GFF DB for closest gene feature *or provided feature*
Arr = cpgmod.get_closest(Rec, GffDb, StartRange, FeatGFF, ID_Feat)
                if Arr != False:
Arr.append(ObEx)
DistArr.append(Arr)
print "CpG Islands predicted...\n"
print "Generating Figure...\n"
# Releasing SeqRecs
MergedRecs = None
SeqRecList = None
# Pre-check DistArr Results
if len(DistArr) < 2:
print "WARNING, "+ str(len(DistArr)) + " sites were found."
print "Consider changing parameters.\n"
# Generate Figure:
ObExRes = pd.DataFrame({
'gene' : [],
'xval': [],
'yval': []})
try:
Cnt = 0
for Dist in DistArr:
Cnt += 1
print "PROGRESS: "+str(Cnt) +" of "+ str(len(DistArr))
ObExdf = pd.DataFrame({
'gene': [Dist[2]],
'xval': [Dist[1]],
'yval': [Dist[3]]})
ObExFram = [ObExRes, ObExdf]
ObExRes = pd.concat(ObExFram, ignore_index=True)
p = ggplot(aes(x='xval', y='yval'), data=ObExRes) \
+ geom_point() \
+ ylab("Observed/Expected CpG") \
+ xlab("Position (bp) Relative to (ATG = 0)") \
+ ggtitle("Predicted CpG Island Position Relative to ATG")
p.save(OutFile)
except IndexError as e:
print 'Error: '+ str(e)
sys.exit('Exiting script...')
print p
# Remove GFF DB
os.remove('GFF.db')
| juswilliams/bioscripts | CpG_by_feature/cpg_gene.py | Python | gpl-3.0 | 3,970 |
import numpy as np
from scipy import sparse
from scipy.interpolate import interp1d
class calibration(object):
'''
some useful tools for manual calibration
'''
def normalize_zdata(self,z_data,cal_z_data):
return z_data/cal_z_data
def normalize_amplitude(self,z_data,cal_ampdata):
return z_data/cal_ampdata
def normalize_phase(self,z_data,cal_phase):
return z_data*np.exp(-1j*cal_phase)
def normalize_by_func(self,f_data,z_data,func):
return z_data/func(f_data)
def _baseline_als(self,y, lam, p, niter=10):
'''
see http://zanran_storage.s3.amazonaws.com/www.science.uva.nl/ContentPages/443199618.pdf
"Asymmetric Least Squares Smoothing" by P. Eilers and H. Boelens in 2005.
http://stackoverflow.com/questions/29156532/python-baseline-correction-library
"There are two parameters: p for asymmetry and lambda for smoothness. Both have to be
tuned to the data at hand. We found that generally 0.001<=p<=0.1 is a good choice
(for a trace with positive peaks) and 10e2<=lambda<=10e9, but exceptions may occur."
'''
L = len(y)
D = sparse.csc_matrix(np.diff(np.eye(L), 2))
w = np.ones(L)
for i in range(niter):
W = sparse.spdiags(w, 0, L, L)
Z = W + lam * D.dot(D.transpose())
z = sparse.linalg.spsolve(Z, w*y)
w = p * (y > z) + (1-p) * (y < z)
return z
def fit_baseline_amp(self,z_data,lam,p,niter=10):
'''
for this to work, you need to analyze a large part of the baseline
tune lam and p until you get the desired result
'''
return self._baseline_als(np.absolute(z_data),lam,p,niter=niter)
def baseline_func_amp(self,z_data,f_data,lam,p,niter=10):
'''
for this to work, you need to analyze a large part of the baseline
tune lam and p until you get the desired result
returns the baseline as a function
the points in between the datapoints are computed by cubic interpolation
'''
return interp1d(f_data, self._baseline_als(np.absolute(z_data),lam,p,niter=niter), kind='cubic')
def baseline_func_phase(self,z_data,f_data,lam,p,niter=10):
'''
for this to work, you need to analyze a large part of the baseline
tune lam and p until you get the desired result
returns the baseline as a function
the points in between the datapoints are computed by cubic interpolation
'''
return interp1d(f_data, self._baseline_als(np.angle(z_data),lam,p,niter=niter), kind='cubic')
def fit_baseline_phase(self,z_data,lam,p,niter=10):
'''
for this to work, you need to analyze a large part of the baseline
tune lam and p until you get the desired result
'''
return self._baseline_als(np.angle(z_data),lam,p,niter=niter)
def GUIbaselinefit(self):
'''
A GUI to help you fit the baseline
'''
self.__lam = 1e6
self.__p = 0.9
niter = 10
self.__baseline = self._baseline_als(np.absolute(self.z_data_raw),self.__lam,self.__p,niter=niter)
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider
fig, (ax0,ax1) = plt.subplots(nrows=2)
plt.suptitle('Use the sliders to make the green curve match the baseline.')
plt.subplots_adjust(left=0.25, bottom=0.25)
l0, = ax0.plot(np.absolute(self.z_data_raw))
l0b, = ax0.plot(np.absolute(self.__baseline))
l1, = ax1.plot(np.absolute(self.z_data_raw/self.__baseline))
ax0.set_ylabel('amp, rawdata vs. baseline')
ax1.set_ylabel('amp, corrected')
axcolor = 'lightgoldenrodyellow'
axSmooth = plt.axes([0.25, 0.1, 0.65, 0.03], axisbg=axcolor)
axAsym = plt.axes([0.25, 0.15, 0.65, 0.03], axisbg=axcolor)
axbcorr = plt.axes([0.25, 0.05, 0.65, 0.03], axisbg=axcolor)
sSmooth = Slider(axSmooth, 'Smoothness', 0.1, 10., valinit=np.log10(self.__lam),valfmt='1E%f')
sAsym = Slider(axAsym, 'Asymmetry', 1e-4,0.99999, valinit=self.__p,valfmt='%f')
sbcorr = Slider(axbcorr, 'vertical shift',0.7,1.1,valinit=1.)
def update(val):
self.__lam = 10**sSmooth.val
self.__p = sAsym.val
self.__baseline = sbcorr.val*self._baseline_als(np.absolute(self.z_data_raw),self.__lam,self.__p,niter=niter)
l0.set_ydata(np.absolute(self.z_data_raw))
l0b.set_ydata(np.absolute(self.__baseline))
l1.set_ydata(np.absolute(self.z_data_raw/self.__baseline))
fig.canvas.draw_idle()
sSmooth.on_changed(update)
sAsym.on_changed(update)
sbcorr.on_changed(update)
plt.show()
self.z_data_raw /= self.__baseline
plt.close()
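if __name__ == '__main__':
    # Minimal usage sketch (an illustrative addition, not part of the original
    # module): fit an asymmetric-least-squares baseline to a synthetic
    # amplitude trace and normalize by it. The lam/p values follow the
    # guidance quoted in _baseline_als.
    f_data = np.linspace(4.0e9, 4.1e9, 401)
    z_data = (1.0 + 0.05 * np.sin(f_data / 5e6)) * np.exp(1j * 0.3)
    cal = calibration()
    baseline = cal.fit_baseline_amp(z_data, lam=1e6, p=0.9)
    z_corrected = cal.normalize_amplitude(z_data, baseline)
    print(z_corrected[:3])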
| vdrhtc/Measurement-automation | resonator_tools/resonator_tools/calibration.py | Python | gpl-3.0 | 4,324 |
## mostly copied from: http://norvig.com/spell-correct.html
import sys, random
import re, collections, time
TXT_FILE='';
BUF_DIR='';
NWORDS=None;
def words(text): return re.findall('[a-z]+', text)
def train(features):
model = collections.defaultdict(lambda: 1)
for f in features:
model[f] += 1
return model
alphabet = 'abcdefghijklmnopqrstuvwxyz'
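# edits1 enumerates every string one edit away from `word`: single-character
# deletions, adjacent transpositions, replacements and insertions.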
def edits1(word):
splits = [(word[:i], word[i:]) for i in range(len(word) + 1)]
deletes = [a + b[1:] for a, b in splits if b]
transposes = [a + b[1] + b[0] + b[2:] for a, b in splits if len(b)>1]
replaces = [a + c + b[1:] for a, b in splits for c in alphabet if b]
inserts = [a + c + b for a, b in splits for c in alphabet]
return set(deletes + transposes + replaces + inserts)
def known_edits2(word):
return set(e2 for e1 in edits1(word) for e2 in edits1(e1) if e2 in NWORDS)
def known(words): return set(w for w in words if w in NWORDS)
def correct(word):
candidates = known([word]) or known(edits1(word)) or known_edits2(word) or [word]
return max(candidates, key=NWORDS.get)
#######################################################################################
if __name__ == '__main__':
TXT_FILE = sys.argv[1]
t0 = time.clock()
o_words = words(file(TXT_FILE).read())
NWORDS = train(o_words)
#print time.clock() - t0, " seconds build time"
#print "dictionary size: %d" %len(NWORDS)
et1 = time.clock() - t0
t_count = 10
rl = o_words[0:t_count] #random.sample(o_words, t_count)
orl = [''.join(random.sample(word, len(word))) for word in o_words]
t1 = time.clock()
r_count = 10
for i in range(0, r_count):
for w1, w2 in zip(rl, orl):
correct(w1); correct(w2)
et2 = (time.clock() - t1)/t_count/r_count/2
print '%d\t%f\t%f' %(len(NWORDS), et1, et2)
#######################################################################################
print 'Done'
| xulesc/spellchecker | impl1.py | Python | gpl-3.0 | 1,898 |
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import collections
import six
from .comminfo import CommissionInfo
from .position import Position
from .metabase import MetaParams
from .order import Order, BuyOrder, SellOrder
class BrokerBack(six.with_metaclass(MetaParams, object)):
params = (('cash', 10000.0), ('commission', CommissionInfo()),)
def __init__(self):
self.comminfo = dict()
self.init()
def init(self):
if None not in self.comminfo.keys():
self.comminfo = dict({None: self.p.commission})
self.startingcash = self.cash = self.p.cash
self.orders = list() # will only be appending
self.pending = collections.deque() # popleft and append(right)
self.positions = collections.defaultdict(Position)
self.notifs = collections.deque()
def getcash(self):
return self.cash
def setcash(self, cash):
self.startingcash = self.cash = self.p.cash = cash
def getcommissioninfo(self, data):
if data._name in self.comminfo:
return self.comminfo[data._name]
return self.comminfo[None]
def setcommission(self, commission=0.0, margin=None, mult=1.0, name=None):
comm = CommissionInfo(commission=commission, margin=margin, mult=mult)
self.comminfo[name] = comm
def addcommissioninfo(self, comminfo, name=None):
self.comminfo[name] = comminfo
def start(self):
self.init()
def stop(self):
pass
def cancel(self, order):
try:
self.pending.remove(order)
except ValueError:
# If the list didn't have the element we didn't cancel anything
return False
order.cancel()
self.notify(order)
return True
def getvalue(self, datas=None):
pos_value = 0.0
for data in datas or self.positions.keys():
comminfo = self.getcommissioninfo(data)
position = self.positions[data]
pos_value += comminfo.getvalue(position, data.close[0])
return self.cash + pos_value
def getposition(self, data):
return self.positions[data]
def submit(self, order):
# FIXME: When an order is submitted, a margin check
# requirement has to be done before it can be accepted. This implies
# going over the entire list of pending orders for all datas and
# existing positions, simulating order execution and ending up
# with a "cash" figure that can be used to check the margin requirement
# of the order. If not met, the order can be immediately rejected
order.pannotated = None
order.plen = len(order.data)
order.accept()
self.orders.append(order)
self.pending.append(order)
self.notify(order)
return order
def buy(self, owner, data,
size, price=None, plimit=None,
exectype=None, valid=None):
order = BuyOrder(owner=owner, data=data,
size=size, price=price, pricelimit=plimit,
exectype=exectype, valid=valid)
return self.submit(order)
def sell(self, owner, data,
size, price=None, plimit=None,
exectype=None, valid=None):
order = SellOrder(owner=owner, data=data,
size=size, price=price, pricelimit=plimit,
exectype=exectype, valid=valid)
return self.submit(order)
def _execute(self, order, dt, price):
# Orders are fully executed, get operation size
size = order.executed.remsize
# Get comminfo object for the data
comminfo = self.getcommissioninfo(order.data)
# Adjust position with operation size
position = self.positions[order.data]
oldpprice = position.price
psize, pprice, opened, closed = position.update(size, price)
abopened, abclosed = abs(opened), abs(closed)
# if part/all of a position has been closed, then there has been
# a profitandloss ... record it
pnl = comminfo.profitandloss(abclosed, oldpprice, price)
if closed:
# Adjust to returned value for closed items & acquired opened items
closedvalue = comminfo.getoperationcost(abclosed, price)
self.cash += closedvalue
# Calculate and substract commission
closedcomm = comminfo.getcomm_pricesize(abclosed, price)
self.cash -= closedcomm
# Re-adjust cash according to future-like movements
# Restore cash which was already taken at the start of the day
self.cash -= comminfo.cashadjust(abclosed,
price,
order.data.close[0])
# pnl = comminfo.profitandloss(oldpsize, oldpprice, price)
else:
closedvalue = closedcomm = 0.0
if opened:
openedvalue = comminfo.getoperationcost(abopened, price)
self.cash -= openedvalue
openedcomm = comminfo.getcomm_pricesize(abopened, price)
self.cash -= openedcomm
# Remove cash for the new opened contracts
self.cash += comminfo.cashadjust(abopened,
price,
order.data.close[0])
else:
openedvalue = openedcomm = 0.0
# Execute and notify the order
order.execute(dt, size, price,
closed, closedvalue, closedcomm,
opened, openedvalue, openedcomm,
comminfo.margin, pnl,
psize, pprice)
self.notify(order)
def notify(self, order):
self.notifs.append(order.clone())
def next(self):
for data, pos in self.positions.items():
# futures change cash in the broker in every bar
# to ensure margin requirements are met
comminfo = self.getcommissioninfo(data)
self.cash += comminfo.cashadjust(pos.size,
data.close[-1],
data.close[0])
# Iterate once over all elements of the pending queue
for i in range(len(self.pending)):
order = self.pending.popleft()
if order.expire():
self.notify(order)
continue
popen = order.data.tick_open or order.data.open[0]
phigh = order.data.tick_high or order.data.high[0]
plow = order.data.tick_low or order.data.low[0]
pclose = order.data.tick_close or order.data.close[0]
pcreated = order.created.price
plimit = order.created.pricelimit
if order.exectype == Order.Market:
self._execute(order, order.data.datetime[0], price=popen)
elif order.exectype == Order.Close:
self._try_exec_close(order, pclose)
elif order.exectype == Order.Limit:
self._try_exec_limit(order, popen, phigh, plow, pcreated)
elif order.exectype == Order.StopLimit and order.triggered:
self._try_exec_limit(order, popen, phigh, plow, plimit)
elif order.exectype == Order.Stop:
self._try_exec_stop(order, popen, phigh, plow, pcreated)
elif order.exectype == Order.StopLimit:
self._try_exec_stoplimit(order,
popen, phigh, plow, pclose,
pcreated, plimit)
if order.alive():
self.pending.append(order)
def _try_exec_close(self, order, pclose):
if len(order.data) > order.plen:
dt0 = order.data.datetime[0]
if dt0 > order.dteos:
if order.pannotated:
execdt = order.data.datetime[-1]
                    execprice = order.pannotated
else:
execdt = dt0
execprice = pclose
self._execute(order, execdt, price=execprice)
return
        # If no execution has taken place ... annotate the closing price
order.pannotated = pclose
def _try_exec_limit(self, order, popen, phigh, plow, plimit):
if order.isbuy():
if plimit >= popen:
# open smaller/equal than requested - buy cheaper
self._execute(order, order.data.datetime[0], price=popen)
elif plimit >= plow:
# day low below req price ... match limit price
self._execute(order, order.data.datetime[0], price=plimit)
else: # Sell
if plimit <= popen:
# open greater/equal than requested - sell more expensive
self._execute(order, order.data.datetime[0], price=popen)
elif plimit <= phigh:
# day high above req price ... match limit price
self._execute(order, order.data.datetime[0], price=plimit)
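    # Worked example for _try_exec_limit (illustrative): a buy limit at 10.0
    # on a bar opening at 9.8 fills at the open (9.8), since the open already
    # satisfies the limit; with open=10.4 and low=9.9 it fills at the limit
    # price (10.0) because the low penetrated it intrabar; otherwise the
    # order stays pending.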
def _try_exec_stop(self, order, popen, phigh, plow, pcreated):
if order.isbuy():
if popen >= pcreated:
# price penetrated with an open gap - use open
self._execute(order, order.data.datetime[0], price=popen)
elif phigh >= pcreated:
# price penetrated during the session - use trigger price
self._execute(order, order.data.datetime[0], price=pcreated)
else: # Sell
if popen <= pcreated:
# price penetrated with an open gap - use open
self._execute(order, order.data.datetime[0], price=popen)
elif plow <= pcreated:
# price penetrated during the session - use trigger price
self._execute(order, order.data.datetime[0], price=pcreated)
def _try_exec_stoplimit(self, order,
popen, phigh, plow, pclose,
pcreated, plimit):
if order.isbuy():
if popen >= pcreated:
order.triggered = True
# price penetrated with an open gap
if plimit >= popen:
self._execute(order, order.data.datetime[0], price=popen)
elif plimit >= plow:
# execute in same bar
self._execute(order, order.data.datetime[0], price=plimit)
elif phigh >= pcreated:
# price penetrated upwards during the session
order.triggered = True
# can calculate execution for a few cases - datetime is fixed
dt = order.data.datetime[0]
if popen > pclose:
if plimit >= pcreated:
self._execute(order, dt, price=pcreated)
elif plimit >= pclose:
self._execute(order, dt, price=plimit)
else: # popen < pclose
if plimit >= pcreated:
self._execute(order, dt, price=pcreated)
else: # Sell
if popen <= pcreated:
# price penetrated downwards with an open gap
order.triggered = True
                if plimit <= popen:
self._execute(order, order.data.datetime[0], price=popen)
elif plimit <= phigh:
# execute in same bar
self._execute(order, order.data.datetime[0], price=plimit)
elif plow <= pcreated:
# price penetrated downwards during the session
order.triggered = True
# can calculate execution for a few cases - datetime is fixed
dt = order.data.datetime[0]
if popen <= pclose:
if plimit <= pcreated:
self._execute(order, dt, price=pcreated)
elif plimit <= pclose:
self._execute(order, dt, price=plimit)
else:
# popen > pclose
if plimit <= pcreated:
self._execute(order, dt, price=pcreated)
| gnagel/backtrader | backtrader/broker.py | Python | gpl-3.0 | 13,299 |
#! /usr/bin/env python
import logging, logtool
from .page import Page
from .xlate_frame import XlateFrame
LOG = logging.getLogger (__name__)
class Contents:
@logtool.log_call
def __init__ (self, canvas, objects):
self.canvas = canvas
self.objects = objects
@logtool.log_call
def render (self):
with Page (self.canvas) as pg:
for obj in self.objects:
coords = pg.next (obj.asset)
with XlateFrame (self.canvas, obj.tile_type, *coords,
inset_by = "margin"):
# print ("Obj: ", obj.asset)
obj.render ()
| clearclaw/xxpaper | xxpaper/contents.py | Python | gpl-3.0 | 590 |
# -*- coding: utf-8 -*-
import logging
from pprint import pformat
from time import clock, sleep
try:
import unittest2 as unittest
except ImportError:
import unittest
import config
from event_stack import TimeOutReached
from database_reception import Database_Reception
from static_agent_pools import Receptionists, Customers
logging.basicConfig (level = logging.INFO)
class Test_Case (unittest.TestCase):
Caller = None
Receptionist = None
Receptionist_2 = None
Callee = None
Reception_Database = None
Reception = None
Start_Time = None
Next_Step = 1
def Preconditions (self, Reception):
self.Start_Time = clock ()
self.Next_Step = 1
self.Log ("Incoming calls test case: Setting up preconditions...")
self.Log ("Requesting a customer (caller)...")
self.Caller = Customers.request ()
self.Log ("Requesting a receptionist...")
self.Receptionist = Receptionists.request ()
self.Log ("Requesting a second receptionist...")
self.Receptionist_2 = Receptionists.request ()
self.Log ("Requesting a customer (callee)...")
self.Callee = Customers.request ()
self.Log ("Select which reception to test...")
self.Reception = Reception
self.Log ("Select a reception database connection...")
self.Reception_Database = Database_Reception (uri = config.reception_server_uri,
authtoken = self.Receptionist.call_control.authtoken)
def Postprocessing (self):
self.Log ("Incoming calls test case: Cleaning up after test...")
if not self.Caller is None:
self.Caller.release ()
if not self.Receptionist is None:
self.Receptionist.release ()
if not self.Receptionist_2 is None:
self.Receptionist_2.release ()
if not self.Callee is None:
self.Callee.release ()
def Step (self,
Message,
Delay_In_Seconds = 0.0):
if self.Next_Step is None:
self.Next_Step = 1
if self.Start_Time is None:
self.Start_Time = clock ()
logging.info ("Step " + str (self.Next_Step) + ": " + Message)
sleep (Delay_In_Seconds)
self.Next_Step += 1
def Log (self,
Message,
Delay_In_Seconds = 0.0):
if self.Next_Step is None:
self.Next_Step = 1
if self.Start_Time is None:
self.Start_Time = clock ()
logging.info (" " + str (self.Next_Step - 1) + ": " + Message)
sleep (Delay_In_Seconds)
def Caller_Places_Call (self, Number):
self.Step (Message = "Caller places call to " + str (Number) + "...")
self.Log (Message = "Dialling through caller agent...")
self.Caller.dial (Number)
def Receptionist_Places_Call (self, Number):
self.Step (Message = "Receptionist places call to " + str (Number) + "...")
self.Log (Message = "Dialling through receptionist agent...")
self.Receptionist.dial (Number)
def Caller_Hears_Dialtone (self):
self.Step (Message = "Caller hears dial-tone...")
self.Log (Message = "Caller agent waits for dial-tone...")
self.Caller.sip_phone.Wait_For_Dialtone ()
def Receptionist_Hears_Dialtone (self):
self.Step (Message = "Receptionist hears dial-tone...")
self.Log (Message = "Receptionist agent waits for dial-tone...")
self.Receptionist.sip_phone.Wait_For_Dialtone ()
def Call_Announced (self):
self.Step (Message = "Receptionist's client waits for 'call_offer'...")
try:
self.Receptionist.event_stack.WaitFor ("call_offer")
except TimeOutReached:
logging.critical (self.Receptionist.event_stack.dump_stack ())
self.fail ("Call offer didn't arrive from Call-Flow-Control.")
if not self.Receptionist.event_stack.stack_contains (event_type="call_offer",
destination=self.Reception):
logging.critical (self.Receptionist.event_stack.dump_stack ())
self.fail ("The arrived call offer was not for the expected reception (destination).")
return self.Receptionist.event_stack.Get_Latest_Event (Event_Type="call_offer", Destination=self.Reception)['call']['id'],\
self.Receptionist.event_stack.Get_Latest_Event (Event_Type="call_offer", Destination=self.Reception)['call']['reception_id']
def Call_Announced_As_Locked (self, Call_ID):
self.Step (Message = "Call-Flow-Control sends out 'call_lock'...")
try:
self.Receptionist.event_stack.WaitFor (event_type = "call_lock",
call_id = Call_ID,
timeout = 20.0)
except TimeOutReached:
logging.critical (self.Receptionist.event_stack.dump_stack ())
self.fail ("No 'call_lock' event arrived from Call-Flow-Control.")
if not self.Receptionist.event_stack.stack_contains (event_type = "call_lock",
destination = self.Reception,
call_id = Call_ID):
logging.critical (self.Receptionist.event_stack.dump_stack ())
self.fail ("The arrived 'call_lock' event was not for the expected reception (destination).")
def Call_Announced_As_Unlocked (self, Call_ID):
self.Step (Message = "Call-Flow-Control sends out 'call_unlock'...")
try:
self.Receptionist.event_stack.WaitFor (event_type = "call_unlock",
call_id = Call_ID)
except TimeOutReached:
logging.critical (self.Receptionist.event_stack.dump_stack ())
self.fail ("No 'call_unlock' event arrived from Call-Flow-Control.")
if not self.Receptionist.event_stack.stack_contains (event_type = "call_unlock",
destination = self.Reception,
call_id = Call_ID):
logging.critical (self.Receptionist.event_stack.dump_stack ())
self.fail ("The arrived 'call_unlock' event was not for the expected reception (destination).")
def Request_Information (self, Reception_ID):
self.Step (Message = "Requesting (updated) information about reception " + str (Reception_ID))
Data_On_Reception = self.Reception_Database.Single (Reception_ID)
self.Step (Message = "Received information on reception " + str (Reception_ID))
return Data_On_Reception
def Offer_To_Pick_Up_Call (self, Call_Flow_Control, Call_ID):
self.Step (Message = "Client offers to answer call...")
try:
Call_Flow_Control.PickupCall (call_id = Call_ID)
except:
self.Log (Message = "Pick-up call returned an error of some kind.")
def Call_Allocation_Acknowledgement (self, Call_ID, Receptionist_ID):
self.Step (Message = "Receptionist's client waits for 'call_pickup'...")
try:
self.Receptionist.event_stack.WaitFor (event_type = "call_pickup",
call_id = Call_ID)
except TimeOutReached:
logging.critical (self.Receptionist.event_stack.dump_stack ())
self.fail ("No 'call_pickup' event arrived from Call-Flow-Control.")
try:
Event = self.Receptionist.event_stack.Get_Latest_Event (Event_Type = "call_pickup",
Call_ID = Call_ID)
except:
logging.critical (self.Receptionist.event_stack.dump_stack ())
self.fail ("Could not extract the received 'call_pickup' event from the Call-Flow-Control client.")
try:
if not Event['call']['assigned_to'] == Receptionist_ID:
logging.critical (self.Receptionist.event_stack.dump_stack ())
self.fail ("The arrived 'call_pickup' event was for " + str (Event['call']['assigned_to']) + ", and not for " + str (Receptionist_ID) + " as expected.")
except:
logging.critical (self.Receptionist.event_stack.dump_stack ())
raise
self.Log (Message = "Call picked up: " + pformat (Event))
return Event
def Receptionist_Answers (self, Call_Information, Reception_Information, After_Greeting_Played):
self.Step (Message = "Receptionist answers...")
if Call_Information['call']['greeting_played']:
try:
self.Log (Message = "Receptionist says '" + Reception_Information['short_greeting'] + "'.")
except:
self.fail ("Reception information missing 'short_greeting'.")
else:
try:
self.Log (Message = "Receptionist says '" + Reception_Information['greeting'] + "'.")
except:
self.fail ("Reception information missing 'greeting'.")
if After_Greeting_Played:
if not Call_Information['call']['greeting_played']:
self.fail ("It appears that the receptionist didn't wait long enough to allow the caller to hear the recorded message.")
else:
if Call_Information['call']['greeting_played']:
self.fail ("It appears that the receptionist waited too long, and allowed the caller to hear the recorded message.")
| AdaHeads/Coverage_Tests | disabled_tests/incoming_calls.py | Python | gpl-3.0 | 9,794 |
# Topydo - A todo.txt client written in Python.
# Copyright (C) 2014 - 2015 Bram Schoenmakers <bram@topydo.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This module provides the Todo class.
"""
from datetime import date
from topydo.lib.Config import config
from topydo.lib.TodoBase import TodoBase
from topydo.lib.Utils import date_string_to_date
class Todo(TodoBase):
"""
This class adds common functionality with respect to dates to the Todo
base class, mainly by interpreting the start and due dates of task.
"""
def __init__(self, p_str):
TodoBase.__init__(self, p_str)
self.attributes = {}
def get_date(self, p_tag):
""" Given a date tag, return a date object. """
string = self.tag_value(p_tag)
result = None
try:
result = date_string_to_date(string) if string else None
except ValueError:
pass
return result
def start_date(self):
""" Returns a date object of the todo's start date. """
return self.get_date(config().tag_start())
def due_date(self):
""" Returns a date object of the todo's due date. """
return self.get_date(config().tag_due())
def is_active(self):
"""
Returns True when the start date is today or in the past and the
task has not yet been completed.
"""
start = self.start_date()
return not self.is_completed() and (not start or start <= date.today())
def is_overdue(self):
"""
Returns True when the due date is in the past and the task has not
yet been completed.
"""
return not self.is_completed() and self.days_till_due() < 0
def days_till_due(self):
"""
Returns the number of days till the due date. Returns a negative number
of days when the due date is in the past.
Returns 0 when the task has no due date.
"""
due = self.due_date()
if due:
diff = due - date.today()
return diff.days
return 0
def length(self):
"""
Returns the length (in days) of the task, by considering the start date
and the due date. When there is no start date, its creation date is
used. Returns 0 when one of these dates is missing.
"""
start = self.start_date() or self.creation_date()
due = self.due_date()
if start and due and start < due:
diff = due - start
return diff.days
else:
return 0
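if __name__ == '__main__':
    # Usage sketch (an illustrative addition; assumes the default 't'/'due'
    # tag names from the topydo configuration):
    todo = Todo('(A) Water plants t:2016-01-01 due:2016-01-04')
    print(todo.length())  # 3 -- days between the start and due dates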
| bram85/topydo | topydo/lib/Todo.py | Python | gpl-3.0 | 3,165 |
from pupa.scrape import Jurisdiction, Organization
from .bills import MNBillScraper
from .committees import MNCommitteeScraper
from .people import MNPersonScraper
from .vote_events import MNVoteScraper
from .events import MNEventScraper
from .common import url_xpath
"""
Minnesota legislative data can be found at the Office of the Revisor
of Statutes:
https://www.revisor.mn.gov/
Votes:
There are not detailed vote data for Senate votes, simply yes and no counts.
Bill pages have vote counts and links to House details, so it makes more
sense to get vote data from the bill pages.
"""
class Minnesota(Jurisdiction):
division_id = "ocd-division/country:us/state:mn"
classification = "government"
name = "Minnesota"
url = "http://state.mn.us/"
check_sessions = True
scrapers = {
"bills": MNBillScraper,
"committees": MNCommitteeScraper,
"people": MNPersonScraper,
"vote_events": MNVoteScraper,
"events": MNEventScraper,
}
parties = [{'name': 'Republican'},
{'name': 'Democratic-Farmer-Labor'}]
legislative_sessions = [
{
'_scraped_name': '86th Legislature, 2009-2010',
'classification': 'primary',
'identifier': '2009-2010',
'name': '2009-2010 Regular Session'
},
{
'_scraped_name': '86th Legislature, 2010 1st Special Session',
'classification': 'special',
'identifier': '2010 1st Special Session',
'name': '2010, 1st Special Session'
},
{
'_scraped_name': '86th Legislature, 2010 2nd Special Session',
'classification': 'special',
'identifier': '2010 2nd Special Session',
'name': '2010, 2nd Special Session'
},
{
'_scraped_name': '87th Legislature, 2011-2012',
'classification': 'primary',
'identifier': '2011-2012',
'name': '2011-2012 Regular Session'
},
{
'_scraped_name': '87th Legislature, 2011 1st Special Session',
'classification': 'special',
'identifier': '2011s1',
'name': '2011, 1st Special Session'
},
{
'_scraped_name': '87th Legislature, 2012 1st Special Session',
'classification': 'special',
'identifier': '2012s1',
'name': '2012, 1st Special Session'
},
{
'_scraped_name': '88th Legislature, 2013-2014',
'classification': 'primary',
'identifier': '2013-2014',
'name': '2013-2014 Regular Session'
},
{
'_scraped_name': '88th Legislature, 2013 1st Special Session',
'classification': 'special',
'identifier': '2013s1',
'name': '2013, 1st Special Session'
},
{
'_scraped_name': '89th Legislature, 2015-2016',
'classification': 'primary',
'identifier': '2015-2016',
'name': '2015-2016 Regular Session'
},
{
'_scraped_name': '89th Legislature, 2015 1st Special Session',
'classification': 'special',
'identifier': '2015s1',
'name': '2015, 1st Special Session'
},
{
'_scraped_name': '90th Legislature, 2017-2018',
'classification': 'primary',
'identifier': '2017-2018',
'name': '2017-2018 Regular Session'
},
]
ignored_scraped_sessions = [
'85th Legislature, 2007-2008',
'85th Legislature, 2007 1st Special Session',
'84th Legislature, 2005-2006',
'84th Legislature, 2005 1st Special Session',
'83rd Legislature, 2003-2004',
'83rd Legislature, 2003 1st Special Session',
'82nd Legislature, 2001-2002',
'82nd Legislature, 2002 1st Special Session',
'82nd Legislature, 2001 1st Special Session',
'81st Legislature, 1999-2000',
'80th Legislature, 1997-1998',
'80th Legislature, 1998 1st Special Session',
'80th Legislature, 1997 3rd Special Session',
'80th Legislature, 1997 2nd Special Session',
'80th Legislature, 1997 1st Special Session',
'79th Legislature, 1995-1996',
'79th Legislature, 1995 1st Special Session',
'89th Legislature, 2015-2016',
]
def get_organizations(self):
legis = Organization('Minnesota Legislature', classification='legislature')
upper = Organization('Minnesota Senate', classification='upper',
parent_id=legis._id)
lower = Organization('Minnesota House of Representatives',
classification='lower', parent_id=legis._id)
for n in range(1, 68):
upper.add_post(label=str(n), role='Senator',
division_id='ocd-division/country:us/state:mn/sldu:{}'.format(n))
lower.add_post(label=str(n) + 'A', role='Representative',
division_id='ocd-division/country:us/state:mn/sldl:{}a'.format(n))
lower.add_post(label=str(n) + 'B', role='Representative',
division_id='ocd-division/country:us/state:mn/sldl:{}b'.format(n))
yield legis
yield upper
yield lower
def get_session_list(self):
return url_xpath('https://www.revisor.mn.gov/revisor/pages/'
'search_status/status_search.php?body=House',
'//select[@name="session"]/option/text()')
| cliftonmcintosh/openstates | openstates/mn/__init__.py | Python | gpl-3.0 | 5,612 |
'''
Copyright 2015
This file is part of Orbach.
Orbach is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Orbach is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Orbach. If not, see <http://www.gnu.org/licenses/>.
'''
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from orbach.core import views
router = DefaultRouter()
router.register(r'galleries', views.GalleryViewSet)
router.register(r'image_files', views.ImageFileViewSet)
router.register(r'users', views.UserViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
]
| awood/orbach | orbach/core/urls.py | Python | gpl-3.0 | 1,005 |
import kivy
kivy.require('1.9.1')
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.metrics import dp
from kivy.app import Builder
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.metrics import sp
from iconbutton import IconButton
__all__ = ('alertPopup', 'confirmPopup', 'okPopup', 'editor_popup')
Builder.load_string('''
<ConfirmPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
<OkPopup>:
cols:1
Label:
text: root.text
GridLayout:
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
            on_press: root.dispatch('on_ok', True)
<EditorPopup>:
id: editor_popup
cols:1
BoxLayout:
id: content
GridLayout:
id: buttons
cols: 2
size_hint_y: None
height: '44sp'
spacing: '5sp'
IconButton:
text: u'\uf00c'
on_press: root.dispatch('on_answer', True)
IconButton:
text: u'\uf00d'
color: ColorScheme.get_primary()
on_release: root.dispatch('on_answer', False)
''')
def alertPopup(title, msg):
popup = Popup(title = title,
content=Label(text = msg),
size_hint=(None, None), size=(dp(600), dp(200)))
popup.open()
def confirmPopup(title, msg, answerCallback):
content = ConfirmPopup(text=msg)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
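# Usage sketch (illustrative; the callback name is an assumption): the bound
# callback receives the popup content widget and the boolean answer
# dispatched by the yes/no buttons:
#     def _on_answer(instance, answer):
#         popup.dismiss()
#     popup = confirmPopup('Confirm', 'Really delete this session?', _on_answer)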
class ConfirmPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(ConfirmPopup,self).__init__(**kwargs)
def on_answer(self, *args):
pass
def editor_popup(title, content, answerCallback):
content = EditorPopup(content=content)
content.bind(on_answer=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(0.7, 0.8),
auto_dismiss= False,
title_size=sp(18))
popup.open()
return popup
class EditorPopup(GridLayout):
content = ObjectProperty(None)
def __init__(self,**kwargs):
self.register_event_type('on_answer')
super(EditorPopup,self).__init__(**kwargs)
def on_content(self, instance, value):
Clock.schedule_once(lambda dt: self.ids.content.add_widget(value))
def on_answer(self, *args):
pass
def okPopup(title, msg, answerCallback):
content = OkPopup(text=msg)
content.bind(on_ok=answerCallback)
popup = Popup(title=title,
content=content,
size_hint=(None, None),
size=(dp(600),dp(200)),
auto_dismiss= False)
popup.open()
return popup
class OkPopup(GridLayout):
text = StringProperty()
def __init__(self,**kwargs):
self.register_event_type('on_ok')
super(OkPopup,self).__init__(**kwargs)
def on_ok(self, *args):
        pass
| ddimensia/RaceCapture_App | autosportlabs/racecapture/views/util/alertview.py | Python | gpl-3.0 | 3,843 |
class CheckBase(object):
"""
Base class for checks.
"""
hooks = []
# pylint: disable=W0105
"""Git hooks to which this class applies. A list of strings."""
def execute(self, hook):
"""
Executes the check.
:param hook: The name of the hook being run.
:type hook: :class:`str`
:returns: ``True`` if the check passed, ``False`` if not.
:rtype: :class:`bool`
"""
pass
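# Illustrative subclass (a sketch, not taken from the glerbl codebase): shows
# the expected shape of a concrete check; the hook name and logic are
# assumptions.
class AlwaysPassCheck(CheckBase):
    """A trivial check that always passes."""
    hooks = ["pre-commit"]

    def execute(self, hook):
        # A real check would inspect the repository here and return False on
        # failure.
        return True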
| lddubeau/glerbl | glerbl/check/__init__.py | Python | gpl-3.0 | 461 |
#!/usr/bin/python3
### rev: 5.0
### author: <zhq>
### features:
### errors included
### up to 63 bases (2 to 64)
### caps recognition and same output format (deprecated)
### for the function parameters, `cur` represents the current (input) base, `res` represents the result (output) base, and `num` represents the current (input) number.
def scale(cur, res, num):
# int, int, str -> str
# Default Settings
num = str(num)
iscaps = False
positive = True
# Input
if cur == res: return num
if num == "0": return "0"
assert cur in range(2, 65) and res in range(2, 65), "Base not defined."
if num[0] == "-":
positive = False
num = num[1:]
result = 0
unit = 1
if cur != 10:
for i in num[::-1]:
value = ord(i)
if value in range(48, 58): value -= 48
elif value in range(65, 92): value -= 55
elif value in range(97, 123): value -= 61
elif value == 64: value = 62
elif value == 95: value = 63
assert value <= cur, "Digit larger than original base. v:%d(%s) b:%d\nCall: scale(%d, %d, %s)" % (value, i, cur, cur, res, num)
result += value * unit
unit *= cur
result = str(result)
# Output
if res != 10:
num = int(result or num)
result = ""
while num > 0:
num, value = divmod(num, res)
if value < 10: digit = value + 48
elif value < 36: digit = value + 55
elif value < 62: digit = value + 61
elif value == 62: digit = 64
elif value == 63: digit = 95
result = chr(digit) + result
if not positive: result = "-" + result
return result
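if __name__ == '__main__':
    # Usage sketch (an illustrative addition, not part of the original script):
    print(scale(10, 2, "10"))     # -> "1010"
    print(scale(16, 10, "FF"))    # -> "255" (A-Z encode digit values 10-35)
    print(scale(10, 64, "4095"))  # -> "__"  ('_' is digit value 63)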
| Irides-Chromium/cipher | scale_strict.py | Python | gpl-3.0 | 1,750 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'events.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
from collections import *
from functools import *
import os, glob
import pandas as pd
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_SamplesDialog(QtGui.QDialog):
def __init__(self, parent=None, datafolder=None):
"""
Constructor
"""
QtGui.QDialog.__init__(self, parent)
# self.filelist = filelist
self.datafolder = datafolder
# labels font
self.font_labels = QtGui.QFont("Arial", 12, QtGui.QFont.Bold)
self.font_edits = QtGui.QFont("Arial", 12)
self.font_buttons = QtGui.QFont("Arial", 10, QtGui.QFont.Bold)
self.setupUi(self)
self.exec_()
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(1000, 400)
self.gridLayout = QtGui.QGridLayout(Dialog)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
# list of Events
self.prepare_form(Dialog)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def load_data(self):
print(self.datafolder)
self.samplefile = glob.glob(os.path.join(self.datafolder, "*_SAMPLES.csv"))[0]
if os.path.isfile(self.samplefile):
self.samplesdf = pd.read_csv(self.samplefile, encoding='ISO-8859-1')
else:
print("File not found: ", self.samplefile)
self.samplesdf = None
self.combodefaults = {'cuvette': ['600', '2000', '4000']}
def prepare_form(self, Dialog):
# load or reload data
self.load_data()
# form dicts
edit_list = ['date', 'time', 'samplename', 'filename', 'smoothing', 'cal32', 'cal44', 'cons32', 'cons44',
'zero44', 'zero45', 'zero46', 'zero47', 'zero49']
combo_list = ['user', 'membrane', 'cuvette']
self.labels = defaultdict(defaultdict)
self.edits = defaultdict(defaultdict)
self.radios = defaultdict(defaultdict)
self.combobox = defaultdict(defaultdict)
self.labs = defaultdict(defaultdict)
self.labs = {"time": "Time",
"date": "Date",
"samplename": "Sample Name",
"filename": "File Name",
"smoothing": "Smoothing",
"cuvette": "Cuvette",
"user": "User",
"membrane": "Membrane",
"cal44": "Calibration 44",
"cal32": "Calibration 32",
"cons32": "Consumption 32",
"cons44": "Consumption 44",
"zero32": "Zero 32",
"zero44": "Zero 44",
"zero45": "Zero 45",
"zero46": "Zero 46",
"zero47": "Zero 47",
"zero49": "Zero 49"}
self.buttons = OrderedDict(sorted({'Apply': defaultdict(object), 'Delete': defaultdict(object)}.items()))
xpos, ypos = 1, 0
for row in self.samplesdf.iterrows():
row_index = row[0]
r = row[1]
self.radios[row_index] = QtGui.QRadioButton(Dialog)
self.radios[row_index].setObjectName(_fromUtf8("_".join(["radio", str(row_index)])))
self.gridLayout.addWidget(self.radios[row_index], ypos+1, 0, 1, 1)
for k in ['samplename', 'date', 'time', 'cuvette']:
# create labels
if ypos == 0:
self.labels[k] = QtGui.QLabel(Dialog)
self.labels[k].setObjectName(_fromUtf8("_".join(["label", k])))
self.labels[k].setText(str(self.labs[k]))
self.labels[k].setAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignVCenter)
self.labels[k].setFont(self.font_labels)
self.gridLayout.addWidget(self.labels[k], 0, xpos, 1, 1)
if k in edit_list:
self.edits[k][row_index] = QtGui.QLineEdit(Dialog)
self.edits[k][row_index].setObjectName(_fromUtf8("_".join(["edit", k, str(row_index)])))
self.edits[k][row_index].setText(str(r[k]))
self.edits[k][row_index].setFont(self.font_edits)
if k in ['time', 'date']:
self.edits[k][row_index].setFixedWidth(80)
self.gridLayout.addWidget(self.edits[k][row_index], ypos+1, xpos, 1, 1)
elif k in combo_list:
self.combobox[k][row_index] = QtGui.QComboBox(Dialog)
self.combobox[k][row_index].setObjectName(_fromUtf8("_".join(["combo", k, str(row_index)])))
self.combobox[k][row_index].addItems(self.combodefaults[k])
self.combobox[k][row_index].setCurrentIndex(self.combobox[k][row_index].findText(str(r[k]), QtCore.Qt.MatchFixedString))
self.combobox[k][row_index].setFont(self.font_edits)
self.gridLayout.addWidget(self.combobox[k][row_index], ypos+1, xpos, 1, 1)
xpos += 1
# create buttons
for k in self.buttons.keys():
# if ypos > 0:
self.buttons[k][row_index] = QtGui.QPushButton(Dialog)
self.buttons[k][row_index].setObjectName(_fromUtf8("_".join(["event", k, "button", str(row_index)])))
self.buttons[k][row_index].setText(_translate("Dialog", k + str(row_index), None))
self.buttons[k][row_index].setFont(self.font_buttons)
if k == 'Apply':
self.buttons[k][row_index].clicked.connect(partial(self.ask_apply_changes, [row_index, Dialog]))
self.buttons[k][row_index].setStyleSheet("background-color: #ffeedd")
elif k == 'Delete':
self.buttons[k][row_index].clicked.connect(partial(self.ask_delete_confirm1, [row_index, Dialog]))
self.buttons[k][row_index].setStyleSheet("background-color: #ffcddd")
self.gridLayout.addWidget(self.buttons[k][row_index], ypos+1, xpos, 1, 1)
xpos += 1
# increments
ypos += 1
xpos = 1
Dialog.resize(1000, 70 + (30 * ypos))
# self.add_row(Dialog)
def ask_delete_confirm1(self, args):
sid = args[0]
Dialog = args[1]
# check if radio button is checked.
if self.radios[sid].isChecked():
msg = "Are you sure you want to delete the following sample : \n\n"
details = ""
for c in self.samplesdf.columns:
details += str(c) + ": " + str(self.samplesdf.at[sid, c]) + "\n"
reply = QtGui.QMessageBox.warning(self, 'Confirmation #1',
msg + details, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
msg2 = "Are you sure REALLY REALLY sure you want to delete the following sample ? \n\n" + \
"This is the last confirmation message. After confirming, the files will be PERMANENTLY deleted and the data WILL be lost ! \n\n"
                reply2 = QtGui.QMessageBox.critical(self, 'Confirmation #2',
                                                    msg2 + details, QtGui.QMessageBox.Yes | QtGui.QMessageBox.No, QtGui.QMessageBox.No)
if reply2 == QtGui.QMessageBox.Yes:
# deletion confirmed
self.delete_confirmed(sid)
                    self.update_form(Dialog)
else:
QtGui.QMessageBox.question(self, 'Error', 'Please select the sample you want to delete on the left',
QtGui.QMessageBox.Ok)
def delete_confirmed(self, sid):
# sample file
filename = self.samplesdf.loc[sid, 'filename']
# delete row in samplesdf
self.samplesdf = self.samplesdf.drop(self.samplesdf.index[sid])
self.samplesdf.to_csv(self.samplefile, index=False, encoding='ISO-8859-1')
# delete file in rawdata
if os.path.isfile(os.path.join(self.datafolder, "rawdata", filename)):
os.remove(os.path.join(self.datafolder, "rawdata", filename))
# print(" delete: ", os.path.join(self.datafolder, "rawdata", filename))
# delete file in data
if os.path.isfile(os.path.join(self.datafolder, filename)):
os.remove(os.path.join(self.datafolder, filename))
# print(" delete: ", os.path.join(self.datafolder, filename))
def ask_apply_changes(self, args):
sid = args[0]
Dialog = args[1]
        newdata = defaultdict(str)
for k in self.edits.keys():
newdata[k] = self.edits[k][sid].text()
for k in self.combobox.keys():
newdata[k] = self.combobox[k][sid].currentText()
details = ""
for k in newdata:
details += str(self.samplesdf.at[sid, k]) + '\t --> \t' + str(newdata[k]) + "\n"
msg = "Are you sure you want to apply the changes to sample " + str(self.samplesdf.at[sid, 'samplename']) + " ?\n\n"
reply = QtGui.QMessageBox.question(self, 'Modify a sample', msg + details, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
self.apply_changes_confirmed(sid, newdata)
self.update_form(Dialog)
else:
print('cancel modification')
def apply_changes_confirmed(self, sid, newdata):
# rename files
newdata['filename'] = str(newdata['date']) + "_" + str(newdata['samplename']) + ".csv"
os.rename(os.path.join(self.datafolder, str(self.samplesdf.at[sid, 'filename'])),
os.path.join(self.datafolder, str(newdata['filename'])))
os.rename(os.path.join(self.datafolder, "rawdata", str(self.samplesdf.at[sid, 'filename'])),
os.path.join(self.datafolder, "rawdata", str(newdata['filename'])))
for k in newdata.keys():
self.samplesdf.at[sid, k] = newdata[k]
self.samplesdf.to_csv(self.samplefile, index=False, encoding='ISO-8859-1')
def update_form(self, Dialog):
# empty variables
self.edits = None
self.combobox = None
self.buttons = None
self.radios = None
self.labs = None
self.labels = None
# empty layout
for i in reversed(range(self.gridLayout.count())):
self.gridLayout.itemAt(i).widget().setParent(None)
self.prepare_form(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_translate("Dialog", "Samples Manager", None))
# self.label.setText(_translate("Dialog", "File", None))
| vince8290/dana | ui_files/samples.py | Python | gpl-3.0 | 11,728 |
from cProfile import Profile
from optparse import make_option
from django.conf import settings
from django.core.management.base import (BaseCommand,
CommandError)
from treeherder.etl.buildapi import (Builds4hJobsProcess,
PendingJobsProcess,
RunningJobsProcess)
from treeherder.etl.pushlog import HgPushlogProcess
from treeherder.model.derived import RefDataManager
class Command(BaseCommand):
"""Management command to ingest data from a single push."""
help = "Ingests a single push into treeherder"
args = '<project> <changeset>'
option_list = BaseCommand.option_list + (
make_option('--profile-file',
action='store',
dest='profile_file',
default=None,
help='Profile command and write result to profile file'),
make_option('--filter-job-group',
action='store',
dest='filter_job_group',
default=None,
help="Only process jobs in specified group symbol "
"(e.g. 'T')")
)
def _handle(self, *args, **options):
if len(args) != 2:
raise CommandError("Need to specify (only) branch and changeset")
(project, changeset) = args
# get reference to repo
rdm = RefDataManager()
repos = filter(lambda x: x['name'] == project,
rdm.get_all_repository_info())
if not repos:
raise CommandError("No project found named '%s'" % project)
repo = repos[0]
# make sure all tasks are run synchronously / immediately
settings.CELERY_ALWAYS_EAGER = True
# get hg pushlog
pushlog_url = '%s/json-pushes/?full=1&version=2' % repo['url']
# ingest this particular revision for this project
process = HgPushlogProcess()
# Use the actual push SHA, in case the changeset specified was a tag
# or branch name (eg tip). HgPushlogProcess returns the full SHA, but
# job ingestion expects the short version, so we truncate it.
push_sha = process.run(pushlog_url, project, changeset=changeset)[0:12]
Builds4hJobsProcess().run(filter_to_project=project,
filter_to_revision=push_sha,
filter_to_job_group=options['filter_job_group'])
PendingJobsProcess().run(filter_to_project=project,
filter_to_revision=push_sha,
filter_to_job_group=options['filter_job_group'])
RunningJobsProcess().run(filter_to_project=project,
filter_to_revision=push_sha,
filter_to_job_group=options['filter_job_group'])
def handle(self, *args, **options):
if options['profile_file']:
profiler = Profile()
profiler.runcall(self._handle, *args, **options)
profiler.dump_stats(options['profile_file'])
else:
self._handle(*args, **options)
| adusca/treeherder | treeherder/etl/management/commands/ingest_push.py | Python | mpl-2.0 | 3,195 |
#
# This is the configuration file for the RPi environd
#
### Presentation - General
# All datetime stamps use typical strftime codes: https://docs.python.org/2/library/datetime.html#strftime-strptime-behavior
# The date/time stamp of the last (most current) reading.
present_lastread_stamp = "%I:%M %p on %A, %b %d"
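# For example, the format above renders roughly as "03:45 PM on Tuesday, Jan 07"
# (an illustrative rendering; exact output depends on locale).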
# How many decimal places to round to when displaying temperatures. For
# presentation only - does not impact reading precision in the database.
present_temp_precision = 1
### Presentation - Recent Graph
# The date/time stamp on the x-axis
present_graph_recent_x = "%I:%M %p"
# How many data points to use.
# This does _not_ reflect how many points will be drawn. Also consider how
# often the readings are made - e.g., if a value is recorded every 15 minutes,
# then a full day's worth of data requires 24x(60/15) = 96 points.
present_recent_point_count = 720
# How much to reduce the specified number of data points.
# This is how many points will be drawn. The value of
# present_recent_point_count is divided in to this many chunks, and then time
# stamp and value of each chunk is averaged.
present_recent_reduce_to = 16
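# As a worked example of the two settings above: at one reading every 15
# minutes, 720 points cover 720 / 96 = 7.5 days of history, and reducing them
# to 16 drawn points means each drawn point averages 720 / 16 = 45 readings
# (about 11 hours each).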
### Presentation - All Time Graph
# < tbd... not implemented yet >
### Files
# The static html file that is output. Must be writable by the user running
# environd. Presumably this is in the www directory of a web server.
www_out = "/var/www/environd.html"
# The template to use for generating static html.
# Must be readable by the user running environd.
html_template = "/opt/environd/template/environd.tpl"
# The (flat text) database file.
# Must be writable by the user running environd, and must exist, even if empty.
database = "/opt/environd/database/temperature_readings.json"
# The log file. Must be writable by the user running environd.
log_file = "/var/log/environd.log"
# Format of the timestamping used internally.
# Does not impact presentation unless a presentation format above is omitted.
datetime_func_format = "%Y%m%dT%H%M%S"
### Tinker/Debug
# Set to True to print all log messages to the terminal, or False to suppress
# most output.
terminal_verbosity = True
# The size in mb after which the db file is rotated.
# The entire db is loaded into memory, but each reading is a mere 60-80
# bytes, so 100 megs holds several decades of readings taken every 15 minutes.
max_db_file_size = 100 # mb
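# A rough sanity check of that estimate (a sketch, assuming ~70 bytes per
# reading and one reading every 15 minutes):
#   readings_per_year = 24 * (60 / 15) * 365 # = 35,040
#   bytes_per_year = 35040 * 70 # ~ 2.4 MB
# so a 100 MB database corresponds to several decades of readings.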
| modalexii/RPi-Environd | config.py | Python | mpl-2.0 | 2,371 |
# coding=utf-8
''' tagsPlorer package entry point (C) 2021-2021 Arne Bachmann https://github.com/ArneBachmann/tagsplorer '''
from tagsplorer import tp
tp.Main().parse_and_run() | ArneBachmann/tagsplorer | tagsplorer/__main__.py | Python | mpl-2.0 | 183 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from socorro.lib import datetimeutil
from socorro.unittest.external.es.base import (
ElasticsearchTestCase,
SuperSearchWithFields,
minimum_es_version,
)
# Uncomment these lines to decrease verbosity of the elasticsearch library
# while running unit tests.
# import logging
# logging.getLogger('elasticsearch').setLevel(logging.ERROR)
# logging.getLogger('requests').setLevel(logging.ERROR)
class IntegrationTestAnalyzers(ElasticsearchTestCase):
"""Test the custom analyzers we create in our indices. """
def setUp(self):
super(IntegrationTestAnalyzers, self).setUp()
self.api = SuperSearchWithFields(config=self.config)
self.now = datetimeutil.utc_now()
@minimum_es_version('1.0')
def test_semicolon_keywords(self):
"""Test the analyzer called `semicolon_keywords`.
That analyzer creates tokens (terms) by splitting the input on
semicolons (;) only.
"""
self.index_crash({
'date_processed': self.now,
'app_init_dlls': '/path/to/dll;;foo;C:\\bar\\boo',
})
self.index_crash({
'date_processed': self.now,
'app_init_dlls': '/path/to/dll;D:\\bar\\boo',
})
self.refresh_index()
res = self.api.get(
app_init_dlls='/path/to/dll',
_facets=['app_init_dlls'],
)
assert res['total'] == 2
assert 'app_init_dlls' in res['facets']
facet_terms = [x['term'] for x in res['facets']['app_init_dlls']]
assert '/path/to/dll' in facet_terms
assert 'c:\\bar\\boo' in facet_terms
assert 'foo' in facet_terms
| Tayamarn/socorro | socorro/unittest/external/es/test_analyzers.py | Python | mpl-2.0 | 1,852 |
"""Django module for the OS2datascanner project."""
| os2webscanner/os2webscanner | django-os2webscanner/os2webscanner/__init__.py | Python | mpl-2.0 | 53 |
# -*- coding: utf-8 -*-
# © 2009 Pexego/Comunitea
# © 2011-2012 Iker Coranti (www.avanzosc.es)
# © 2014 Juanjo Algaz (gutierrezweb.es)
# © 2014-2016 Pedro M. Baeza
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl-3.0).
{
"name": "Account balance reporting engine",
"version": "8.0.1.2.0",
"author": "Pexego, "
"AvanzOSC, "
"Tecnativa, "
"Odoo Community Association (OCA)",
"website": "http://www.pexego.es",
"category": "Accounting & Finance",
"contributors": [
"Juanjo Algaz <juanjoa@malagatic.com>",
"Joaquín Gutierrez <joaquing.pedrosa@gmail.com>",
"Pedro M. Baeza <pedro.baeza@tecnativa.com>",
"Oihane Crucelaegui <oihanecrucelaegi@avanzosc.es>",
],
"license": 'AGPL-3',
"depends": [
"account",
],
"data": [
"security/ir.model.access.csv",
"views/account_account_view.xml",
"views/account_balance_reporting_template_view.xml",
"views/account_balance_reporting_report_view.xml",
"views/account_balance_reporting_menu.xml",
"report/account_balance_reporting_reports.xml",
"report/report_generic.xml",
"wizard/wizard_print_view.xml",
],
"installable": True,
}
| Endika/l10n-spain | account_balance_reporting/__openerp__.py | Python | agpl-3.0 | 1,278 |
# -*- coding:utf-8 -*-
#
#
# Copyright (C) 2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
'name': 'Capture picture with webcam',
'version': '1.0',
'category': 'Generic Modules/Human Resources',
'description': """
TApplicant WebCam
    =================
Capture employee pictures with an attached web cam.
""",
'author': "Michael Telahun Makonnen <mmakonnen@gmail.com>,"
"Odoo Community Association (OCA)",
'website': 'http://miketelahun.wordpress.com',
'license': 'AGPL-3',
'depends': [
'hr',
'web',
'trip'
],
'js': [
'static/src/js/jquery.webcam.js',
'static/src/js/tapplicant_webcam.js',
],
'css': [
'static/src/css/tapplicant_webcam.css',
],
'qweb': [
'static/src/xml/tapplicant_webcam.xml',
],
'data': [
'tapplicant_webcam_data.xml',
'tapplicant_webcam_view.xml',
],
'installable': True,
'active': False,
}
| nishad-jobsglobal/odoo-marriot | openerp/addons/tapplicant_webcam/__openerp__.py | Python | agpl-3.0 | 1,685 |
# ActivitySim
# Copyright (C) 2014-2015 Synthicity, LLC
# See full license in LICENSE.txt.
import os.path
import numpy as np
import pandas as pd
import pandas.util.testing as pdt
import pytest
from ..activitysim import eval_variables
from .. import mnl
# this is lifted straight from urbansim's test_mnl.py
@pytest.fixture(scope='module', params=[
('fish.csv',
'fish_choosers.csv',
pd.DataFrame(
[[-0.02047652], [0.95309824]], index=['price', 'catch'],
columns=['Alt']),
pd.DataFrame([
[0.2849598, 0.2742482, 0.1605457, 0.2802463],
[0.1498991, 0.4542377, 0.2600969, 0.1357664]],
columns=['beach', 'boat', 'charter', 'pier']))])
def test_data(request):
data, choosers, spec, probabilities = request.param
return {
'data': data,
'choosers': choosers,
'spec': spec,
'probabilities': probabilities
}
@pytest.fixture
def choosers(test_data):
filen = os.path.join(
os.path.dirname(__file__), 'data', test_data['choosers'])
return pd.read_csv(filen)
@pytest.fixture
def spec(test_data):
return test_data['spec']
@pytest.fixture
def choosers_dm(choosers, spec):
return eval_variables(spec.index, choosers)
@pytest.fixture
def utilities(choosers_dm, spec, test_data):
utils = choosers_dm.dot(spec).astype('float')
return pd.DataFrame(
utils.as_matrix().reshape(test_data['probabilities'].shape),
columns=test_data['probabilities'].columns)
def test_utils_to_probs(utilities, test_data):
probs = mnl.utils_to_probs(utilities)
pdt.assert_frame_equal(probs, test_data['probabilities'])
def test_utils_to_probs_raises():
with pytest.raises(RuntimeError):
mnl.utils_to_probs(
pd.DataFrame([[1, 2, np.inf, 3]]))
def test_make_choices_only_one():
probs = pd.DataFrame(
[[1, 0, 0], [0, 1, 0]], columns=['a', 'b', 'c'], index=['x', 'y'])
choices = mnl.make_choices(probs)
pdt.assert_series_equal(
choices,
pd.Series([0, 1], index=['x', 'y']))
def test_make_choices_real_probs(random_seed, utilities):
probs = mnl.utils_to_probs(utilities)
choices = mnl.make_choices(probs)
pdt.assert_series_equal(
choices,
pd.Series([1, 2], index=[0, 1]))
@pytest.fixture(scope='module')
def interaction_choosers():
return pd.DataFrame({
'attr': ['a', 'b', 'c', 'b']},
index=['w', 'x', 'y', 'z'])
@pytest.fixture(scope='module')
def interaction_alts():
return pd.DataFrame({
'prop': [10, 20, 30, 40]},
index=[1, 2, 3, 4])
def test_interaction_dataset_no_sample(interaction_choosers, interaction_alts):
expected = pd.DataFrame({
'attr': ['a'] * 4 + ['b'] * 4 + ['c'] * 4 + ['b'] * 4,
'prop': [10, 20, 30, 40] * 4,
'chooser_idx': ['w'] * 4 + ['x'] * 4 + ['y'] * 4 + ['z'] * 4},
index=[1, 2, 3, 4] * 4)
interacted = mnl.interaction_dataset(
interaction_choosers, interaction_alts)
interacted, expected = interacted.align(expected, axis=1)
pdt.assert_frame_equal(interacted, expected)
def test_interaction_dataset_sampled(
interaction_choosers, interaction_alts, random_seed):
expected = pd.DataFrame({
'attr': ['a'] * 2 + ['b'] * 2 + ['c'] * 2 + ['b'] * 2,
'prop': [30, 40, 10, 30, 40, 10, 20, 10],
'chooser_idx': ['w'] * 2 + ['x'] * 2 + ['y'] * 2 + ['z'] * 2},
index=[3, 4, 1, 3, 4, 1, 2, 1])
interacted = mnl.interaction_dataset(
interaction_choosers, interaction_alts, sample_size=2)
interacted, expected = interacted.align(expected, axis=1)
pdt.assert_frame_equal(interacted, expected)
| bhargavasana/activitysim | activitysim/tests/test_mnl.py | Python | agpl-3.0 | 3,724 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-2014 OpenERP (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" High-level objects for fields. """
from collections import OrderedDict
from datetime import date, datetime
from functools import partial
from operator import attrgetter
from types import NoneType
import logging
import pytz
import xmlrpclib
from openerp.tools import float_round, frozendict, html_sanitize, ustr, OrderedSet
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as DATE_FORMAT
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as DATETIME_FORMAT
DATE_LENGTH = len(date.today().strftime(DATE_FORMAT))
DATETIME_LENGTH = len(datetime.now().strftime(DATETIME_FORMAT))
EMPTY_DICT = frozendict()
_logger = logging.getLogger(__name__)
class SpecialValue(object):
""" Encapsulates a value in the cache in place of a normal value. """
def __init__(self, value):
self.value = value
def get(self):
return self.value
class FailedValue(SpecialValue):
""" Special value that encapsulates an exception instead of a value. """
def __init__(self, exception):
self.exception = exception
def get(self):
raise self.exception
def _check_value(value):
""" Return ``value``, or call its getter if ``value`` is a :class:`SpecialValue`. """
return value.get() if isinstance(value, SpecialValue) else value
def resolve_all_mro(cls, name, reverse=False):
""" Return the (successively overridden) values of attribute ``name`` in ``cls``
in mro order, or inverse mro order if ``reverse`` is true.
"""
klasses = reversed(cls.__mro__) if reverse else cls.__mro__
for klass in klasses:
if name in klass.__dict__:
yield klass.__dict__[name]
class MetaField(type):
""" Metaclass for field classes. """
by_type = {}
def __new__(meta, name, bases, attrs):
""" Combine the ``_slots`` dict from parent classes, and determine
        ``__slots__`` for the new class.
"""
base_slots = {}
for base in reversed(bases):
base_slots.update(getattr(base, '_slots', ()))
slots = dict(base_slots)
slots.update(attrs.get('_slots', ()))
attrs['__slots__'] = set(slots) - set(base_slots)
attrs['_slots'] = slots
return type.__new__(meta, name, bases, attrs)
def __init__(cls, name, bases, attrs):
super(MetaField, cls).__init__(name, bases, attrs)
if cls.type and cls.type not in MetaField.by_type:
MetaField.by_type[cls.type] = cls
# compute class attributes to avoid calling dir() on fields
cls.column_attrs = []
cls.related_attrs = []
cls.description_attrs = []
for attr in dir(cls):
if attr.startswith('_column_'):
cls.column_attrs.append((attr[8:], attr))
elif attr.startswith('_related_'):
cls.related_attrs.append((attr[9:], attr))
elif attr.startswith('_description_'):
cls.description_attrs.append((attr[13:], attr))
class Field(object):
""" The field descriptor contains the field definition, and manages accesses
and assignments of the corresponding field on records. The following
    attributes may be provided when instantiating a field:
:param string: the label of the field seen by users (string); if not
set, the ORM takes the field name in the class (capitalized).
:param help: the tooltip of the field seen by users (string)
:param readonly: whether the field is readonly (boolean, by default ``False``)
:param required: whether the value of the field is required (boolean, by
default ``False``)
:param index: whether the field is indexed in database (boolean, by
default ``False``)
:param default: the default value for the field; this is either a static
value, or a function taking a recordset and returning a value
:param states: a dictionary mapping state values to lists of UI attribute-value
pairs; possible attributes are: 'readonly', 'required', 'invisible'.
Note: Any state-based condition requires the ``state`` field value to be
available on the client-side UI. This is typically done by including it in
the relevant views, possibly made invisible if not relevant for the
end-user.
:param groups: comma-separated list of group xml ids (string); this
restricts the field access to the users of the given groups only
:param bool copy: whether the field value should be copied when the record
is duplicated (default: ``True`` for normal fields, ``False`` for
``one2many`` and computed fields, including property fields and
related fields)
:param string oldname: the previous name of this field, so that ORM can rename
it automatically at migration
.. _field-computed:
.. rubric:: Computed fields
One can define a field whose value is computed instead of simply being
read from the database. The attributes that are specific to computed
fields are given below. To define such a field, simply provide a value
for the attribute ``compute``.
:param compute: name of a method that computes the field
:param inverse: name of a method that inverses the field (optional)
:param search: name of a method that implement search on the field (optional)
:param store: whether the field is stored in database (boolean, by
default ``False`` on computed fields)
:param compute_sudo: whether the field should be recomputed as superuser
to bypass access rights (boolean, by default ``False``)
The methods given for ``compute``, ``inverse`` and ``search`` are model
methods. Their signature is shown in the following example::
upper = fields.Char(compute='_compute_upper',
inverse='_inverse_upper',
search='_search_upper')
@api.depends('name')
def _compute_upper(self):
for rec in self:
rec.upper = rec.name.upper() if rec.name else False
def _inverse_upper(self):
for rec in self:
rec.name = rec.upper.lower() if rec.upper else False
def _search_upper(self, operator, value):
if operator == 'like':
operator = 'ilike'
return [('name', operator, value)]
The compute method has to assign the field on all records of the invoked
recordset. The decorator :meth:`openerp.api.depends` must be applied on
the compute method to specify the field dependencies; those dependencies
are used to determine when to recompute the field; recomputation is
automatic and guarantees cache/database consistency. Note that the same
method can be used for several fields, you simply have to assign all the
given fields in the method; the method will be invoked once for all
those fields.
By default, a computed field is not stored to the database, and is
computed on-the-fly. Adding the attribute ``store=True`` will store the
field's values in the database. The advantage of a stored field is that
searching on that field is done by the database itself. The disadvantage
is that it requires database updates when the field must be recomputed.
The inverse method, as its name says, does the inverse of the compute
method: the invoked records have a value for the field, and you must
apply the necessary changes on the field dependencies such that the
computation gives the expected value. Note that a computed field without
an inverse method is readonly by default.
The search method is invoked when processing domains before doing an
actual search on the model. It must return a domain equivalent to the
condition: ``field operator value``.
.. _field-related:
.. rubric:: Related fields
The value of a related field is given by following a sequence of
relational fields and reading a field on the reached model. The complete
sequence of fields to traverse is specified by the attribute
:param related: sequence of field names
Some field attributes are automatically copied from the source field if
they are not redefined: ``string``, ``help``, ``readonly``, ``required`` (only
if all fields in the sequence are required), ``groups``, ``digits``, ``size``,
``translate``, ``sanitize``, ``selection``, ``comodel_name``, ``domain``,
``context``. All semantic-free attributes are copied from the source
field.
By default, the values of related fields are not stored to the database.
Add the attribute ``store=True`` to make it stored, just like computed
fields. Related fields are automatically recomputed when their
dependencies are modified.
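    For instance, a model could expose its partner's city as follows (an
    illustrative sketch, assuming the model has a ``partner_id`` many2one
    field)::
        partner_city = fields.Char(related='partner_id.city')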
.. _field-company-dependent:
.. rubric:: Company-dependent fields
Formerly known as 'property' fields, the value of those fields depends
on the company. In other words, users that belong to different companies
may see different values for the field on a given record.
:param company_dependent: whether the field is company-dependent (boolean)
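    For instance (an illustrative sketch; the field name is made up)::
        standard_price = fields.Float(company_dependent=True)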
.. _field-incremental-definition:
.. rubric:: Incremental definition
A field is defined as class attribute on a model class. If the model
is extended (see :class:`~openerp.models.Model`), one can also extend
the field definition by redefining a field with the same name and same
type on the subclass. In that case, the attributes of the field are
taken from the parent class and overridden by the ones given in
subclasses.
For instance, the second class below only adds a tooltip on the field
``state``::
class First(models.Model):
_name = 'foo'
state = fields.Selection([...], required=True)
class Second(models.Model):
_inherit = 'foo'
state = fields.Selection(help="Blah blah blah")
"""
__metaclass__ = MetaField
type = None # type of the field (string)
relational = False # whether the field is a relational one
_slots = {
'_attrs': EMPTY_DICT, # dictionary of field attributes; it contains:
# - all attributes after __init__()
# - free attributes only after set_class_name()
'automatic': False, # whether the field is automatically created ("magic" field)
'inherited': False, # whether the field is inherited (_inherits)
'column': None, # the column corresponding to the field
'setup_done': False, # whether the field has been set up
'name': None, # name of the field
'model_name': None, # name of the model of this field
'comodel_name': None, # name of the model of values (if relational)
'store': True, # whether the field is stored in database
'index': False, # whether the field is indexed in database
'manual': False, # whether the field is a custom field
'copy': True, # whether the field is copied over by BaseModel.copy()
'depends': (), # collection of field dependencies
'recursive': False, # whether self depends on itself
'compute': None, # compute(recs) computes field on recs
'compute_sudo': False, # whether field should be recomputed as admin
'inverse': None, # inverse(recs) inverses field on recs
'search': None, # search(recs, operator, value) searches on self
'related': None, # sequence of field names, for related fields
'related_sudo': True, # whether related fields should be read as admin
'company_dependent': False, # whether ``self`` is company-dependent (property field)
'default': None, # default(recs) returns the default value
'string': None, # field label
'help': None, # field tooltip
'readonly': False, # whether the field is readonly
'required': False, # whether the field is required
'states': None, # set readonly and required depending on state
'groups': None, # csv list of group xml ids
'change_default': False, # whether the field may trigger a "user-onchange"
'deprecated': None, # whether the field is deprecated
'inverse_fields': (), # collection of inverse fields (objects)
'computed_fields': (), # fields computed with the same method as self
'related_field': None, # corresponding related field
'_triggers': (), # invalidation and recomputation triggers
}
def __init__(self, string=None, **kwargs):
kwargs['string'] = string
attrs = {key: val for key, val in kwargs.iteritems() if val is not None}
self._attrs = attrs or EMPTY_DICT
def __getattr__(self, name):
""" Access non-slot field attribute. """
try:
return self._attrs[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
""" Set slot or non-slot field attribute. """
try:
object.__setattr__(self, name, value)
except AttributeError:
if self._attrs:
self._attrs[name] = value
else:
self._attrs = {name: value} # replace EMPTY_DICT
def __delattr__(self, name):
""" Remove non-slot field attribute. """
try:
del self._attrs[name]
except KeyError:
raise AttributeError(name)
def new(self, **kwargs):
""" Return a field of the same type as ``self``, with its own parameters. """
return type(self)(**kwargs)
def set_class_name(self, cls, name):
""" Assign the model class and field name of ``self``. """
self_attrs = self._attrs
for attr, value in self._slots.iteritems():
setattr(self, attr, value)
self.model_name = cls._name
self.name = name
# determine all inherited field attributes
attrs = {}
for field in resolve_all_mro(cls, name, reverse=True):
if isinstance(field, type(self)):
attrs.update(field._attrs)
else:
attrs.clear()
attrs.update(self_attrs) # necessary in case self is not in cls
# initialize ``self`` with ``attrs``
if attrs.get('compute'):
# by default, computed fields are not stored, not copied and readonly
attrs['store'] = attrs.get('store', False)
attrs['copy'] = attrs.get('copy', False)
attrs['readonly'] = attrs.get('readonly', not attrs.get('inverse'))
if attrs.get('related'):
# by default, related fields are not stored and not copied
attrs['store'] = attrs.get('store', False)
attrs['copy'] = attrs.get('copy', False)
# fix for function fields overridden by regular columns
if not isinstance(attrs.get('column'), (NoneType, fields.function)):
attrs.pop('store', None)
for attr, value in attrs.iteritems():
setattr(self, attr, value)
if not self.string and not self.related:
# related fields get their string from their parent field
self.string = name.replace('_', ' ').capitalize()
# determine self.default and cls._defaults in a consistent way
self._determine_default(cls, name)
def _determine_default(self, cls, name):
""" Retrieve the default value for ``self`` in the hierarchy of ``cls``, and
determine ``self.default`` and ``cls._defaults`` accordingly.
"""
self.default = None
# traverse the class hierarchy upwards, and take the first field
# definition with a default or _defaults for self
for klass in cls.__mro__:
if name in klass.__dict__:
field = klass.__dict__[name]
if not isinstance(field, type(self)):
# klass contains another value overridden by self
return
if 'default' in field._attrs:
# take the default in field, and adapt it for cls._defaults
value = field._attrs['default']
if callable(value):
from openerp import api
self.default = value
cls._defaults[name] = api.model(
lambda recs: self.convert_to_write(value(recs))
)
else:
self.default = lambda recs: value
cls._defaults[name] = value
return
defaults = klass.__dict__.get('_defaults') or {}
if name in defaults:
# take the value from _defaults, and adapt it for self.default
value = defaults[name]
if callable(value):
func = lambda recs: value(recs._model, recs._cr, recs._uid, recs._context)
else:
func = lambda recs: value
self.default = lambda recs: self.convert_to_cache(
func(recs), recs, validate=False,
)
cls._defaults[name] = value
return
def __str__(self):
return "%s.%s" % (self.model_name, self.name)
def __repr__(self):
return "%s.%s" % (self.model_name, self.name)
############################################################################
#
# Field setup
#
def setup(self, env):
""" Make sure that ``self`` is set up, except for recomputation triggers. """
if not self.setup_done:
if self.related:
self._setup_related(env)
else:
self._setup_regular(env)
self.setup_done = True
#
# Setup of non-related fields
#
def _setup_regular(self, env):
""" Setup the attributes of a non-related field. """
recs = env[self.model_name]
def make_depends(deps):
return tuple(deps(recs) if callable(deps) else deps)
# convert compute into a callable and determine depends
if isinstance(self.compute, basestring):
# if the compute method has been overridden, concatenate all their _depends
self.depends = ()
for method in resolve_all_mro(type(recs), self.compute, reverse=True):
self.depends += make_depends(getattr(method, '_depends', ()))
self.compute = getattr(type(recs), self.compute)
else:
self.depends = make_depends(getattr(self.compute, '_depends', ()))
# convert inverse and search into callables
if isinstance(self.inverse, basestring):
self.inverse = getattr(type(recs), self.inverse)
if isinstance(self.search, basestring):
self.search = getattr(type(recs), self.search)
#
# Setup of related fields
#
def _setup_related(self, env):
""" Setup the attributes of a related field. """
# fix the type of self.related if necessary
if isinstance(self.related, basestring):
self.related = tuple(self.related.split('.'))
# determine the chain of fields, and make sure they are all set up
recs = env[self.model_name]
fields = []
for name in self.related:
field = recs._fields[name]
field.setup(env)
recs = recs[name]
fields.append(field)
self.related_field = field
# check type consistency
if self.type != field.type:
raise Warning("Type of related field %s is inconsistent with %s" % (self, field))
# determine dependencies, compute, inverse, and search
self.depends = ('.'.join(self.related),)
self.compute = self._compute_related
if not (self.readonly or field.readonly):
self.inverse = self._inverse_related
if field._description_searchable:
# allow searching on self only if the related field is searchable
self.search = self._search_related
# copy attributes from field to self (string, help, etc.)
for attr, prop in self.related_attrs:
if not getattr(self, attr):
setattr(self, attr, getattr(field, prop))
for attr, value in field._attrs.iteritems():
if attr not in self._attrs:
setattr(self, attr, value)
# special case for states: copy it only for inherited fields
if not self.states and self.inherited:
self.states = field.states
# special case for required: check if all fields are required
if not self.store and not self.required:
self.required = all(field.required for field in fields)
def _compute_related(self, records):
""" Compute the related field ``self`` on ``records``. """
# when related_sudo, bypass access rights checks when reading values
others = records.sudo() if self.related_sudo else records
for record, other in zip(records, others):
if not record.id:
# draft record, do not switch to another environment
other = record
# traverse the intermediate fields; follow the first record at each step
for name in self.related[:-1]:
other = other[name][:1]
record[self.name] = other[self.related[-1]]
def _inverse_related(self, records):
""" Inverse the related field ``self`` on ``records``. """
# store record values, otherwise they may be lost by cache invalidation!
record_value = {record: record[self.name] for record in records}
for record in records:
other = record
# traverse the intermediate fields, and keep at most one record
for name in self.related[:-1]:
other = other[name][:1]
if other:
other[self.related[-1]] = record_value[record]
def _search_related(self, records, operator, value):
""" Determine the domain to search on field ``self``. """
return [('.'.join(self.related), operator, value)]
# properties used by _setup_related() to copy values from related field
_related_comodel_name = property(attrgetter('comodel_name'))
_related_string = property(attrgetter('string'))
_related_help = property(attrgetter('help'))
_related_readonly = property(attrgetter('readonly'))
_related_groups = property(attrgetter('groups'))
@property
def base_field(self):
""" Return the base field of an inherited field, or ``self``. """
return self.related_field.base_field if self.inherited else self
#
# Setup of field triggers
#
# The triggers is a collection of pairs (field, path) of computed fields
# that depend on ``self``. When ``self`` is modified, it invalidates the cache
# of each ``field``, and registers the records to recompute based on ``path``.
# See method ``modified`` below for details.
#
def add_trigger(self, trigger):
""" Add a recomputation trigger on ``self``. """
if trigger not in self._triggers:
self._triggers += (trigger,)
def setup_triggers(self, env):
""" Add the necessary triggers to invalidate/recompute ``self``. """
model = env[self.model_name]
for path in self.depends:
self._setup_dependency([], model, path.split('.'))
def _setup_dependency(self, path0, model, path1):
""" Make ``self`` depend on ``model``; `path0 + path1` is a dependency of
``self``, and ``path0`` is the sequence of field names from ``self.model``
to ``model``.
"""
env = model.env
head, tail = path1[0], path1[1:]
if head == '*':
# special case: add triggers on all fields of model (except self)
fields = set(model._fields.itervalues()) - set([self])
else:
fields = [model._fields[head]]
for field in fields:
if field == self:
_logger.debug("Field %s is recursively defined", self)
self.recursive = True
continue
#_logger.debug("Add trigger on %s to recompute %s", field, self)
field.add_trigger((self, '.'.join(path0 or ['id'])))
# add trigger on inverse fields, too
for invf in field.inverse_fields:
#_logger.debug("Add trigger on %s to recompute %s", invf, self)
invf.add_trigger((self, '.'.join(path0 + [head])))
# recursively traverse the dependency
if tail:
comodel = env[field.comodel_name]
self._setup_dependency(path0 + [head], comodel, tail)
@property
def dependents(self):
""" Return the computed fields that depend on ``self``. """
return (field for field, path in self._triggers)
############################################################################
#
# Field description
#
def get_description(self, env):
""" Return a dictionary that describes the field ``self``. """
desc = {'type': self.type}
for attr, prop in self.description_attrs:
value = getattr(self, prop)
if callable(value):
value = value(env)
if value is not None:
desc[attr] = value
return desc
# properties used by get_description()
_description_store = property(attrgetter('store'))
_description_manual = property(attrgetter('manual'))
_description_depends = property(attrgetter('depends'))
_description_related = property(attrgetter('related'))
_description_company_dependent = property(attrgetter('company_dependent'))
_description_readonly = property(attrgetter('readonly'))
_description_required = property(attrgetter('required'))
_description_states = property(attrgetter('states'))
_description_groups = property(attrgetter('groups'))
_description_change_default = property(attrgetter('change_default'))
_description_deprecated = property(attrgetter('deprecated'))
@property
def _description_searchable(self):
return bool(self.store or self.search or (self.column and self.column._fnct_search))
@property
def _description_sortable(self):
return self.store or (self.inherited and self.related_field._description_sortable)
def _description_string(self, env):
if self.string and env.lang:
field = self.base_field
name = "%s,%s" % (field.model_name, field.name)
trans = env['ir.translation']._get_source(name, 'field', env.lang)
return trans or self.string
return self.string
def _description_help(self, env):
if self.help and env.lang:
name = "%s,%s" % (self.model_name, self.name)
trans = env['ir.translation']._get_source(name, 'help', env.lang)
return trans or self.help
return self.help
############################################################################
#
# Conversion to column instance
#
def to_column(self):
""" Return a column object corresponding to ``self``, or ``None``. """
if not self.store and self.compute:
# non-stored computed fields do not have a corresponding column
self.column = None
return None
# determine column parameters
#_logger.debug("Create fields._column for Field %s", self)
args = {}
for attr, prop in self.column_attrs:
args[attr] = getattr(self, prop)
for attr, value in self._attrs.iteritems():
args[attr] = value
if self.company_dependent:
# company-dependent fields are mapped to former property fields
args['type'] = self.type
args['relation'] = self.comodel_name
self.column = fields.property(**args)
elif self.column:
# let the column provide a valid column for the given parameters
self.column = self.column.new(_computed_field=bool(self.compute), **args)
else:
# create a fresh new column of the right type
self.column = getattr(fields, self.type)(**args)
return self.column
# properties used by to_column() to create a column instance
_column_copy = property(attrgetter('copy'))
_column_select = property(attrgetter('index'))
_column_manual = property(attrgetter('manual'))
_column_string = property(attrgetter('string'))
_column_help = property(attrgetter('help'))
_column_readonly = property(attrgetter('readonly'))
_column_required = property(attrgetter('required'))
_column_states = property(attrgetter('states'))
_column_groups = property(attrgetter('groups'))
_column_change_default = property(attrgetter('change_default'))
_column_deprecated = property(attrgetter('deprecated'))
############################################################################
#
# Conversion of values
#
def null(self, env):
""" return the null value for this field in the given environment """
return False
def convert_to_cache(self, value, record, validate=True):
""" convert ``value`` to the cache level in ``env``; ``value`` may come from
an assignment, or have the format of methods :meth:`BaseModel.read`
or :meth:`BaseModel.write`
:param record: the target record for the assignment, or an empty recordset
:param bool validate: when True, field-specific validation of
``value`` will be performed
"""
return value
def convert_to_read(self, value, use_name_get=True):
""" convert ``value`` from the cache to a value as returned by method
:meth:`BaseModel.read`
        :param bool use_name_get: when True, value's display name will
be computed using :meth:`BaseModel.name_get`, if relevant
for the field
"""
return False if value is None else value
def convert_to_write(self, value, target=None, fnames=None):
""" convert ``value`` from the cache to a valid value for method
:meth:`BaseModel.write`.
:param target: optional, the record to be modified with this value
:param fnames: for relational fields only, an optional collection of
field names to convert
"""
return self.convert_to_read(value)
def convert_to_onchange(self, value):
""" convert ``value`` from the cache to a valid value for an onchange
method v7.
"""
return self.convert_to_write(value)
def convert_to_export(self, value, env):
""" convert ``value`` from the cache to a valid value for export. The
parameter ``env`` is given for managing translations.
"""
if not value:
return ''
return value if env.context.get('export_raw_data') else ustr(value)
def convert_to_display_name(self, value, record=None):
""" convert ``value`` from the cache to a suitable display name. """
return ustr(value)
############################################################################
#
# Descriptor methods
#
def __get__(self, record, owner):
""" return the value of field ``self`` on ``record`` """
if record is None:
return self # the field is accessed through the owner class
if not record:
# null record -> return the null value for this field
return self.null(record.env)
# only a single record may be accessed
record.ensure_one()
try:
return record._cache[self]
except KeyError:
pass
# cache miss, retrieve value
if record.id:
# normal record -> read or compute value for this field
self.determine_value(record)
else:
# draft record -> compute the value or let it be null
self.determine_draft_value(record)
# the result should be in cache now
return record._cache[self]
def __set__(self, record, value):
""" set the value of field ``self`` on ``record`` """
env = record.env
# only a single record may be updated
record.ensure_one()
# adapt value to the cache level
value = self.convert_to_cache(value, record)
if env.in_draft or not record.id:
# determine dependent fields
spec = self.modified_draft(record)
# set value in cache, inverse field, and mark record as dirty
record._cache[self] = value
if env.in_onchange:
for invf in self.inverse_fields:
invf._update(value, record)
record._set_dirty(self.name)
# determine more dependent fields, and invalidate them
if self.relational:
spec += self.modified_draft(record)
env.invalidate(spec)
else:
# simply write to the database, and update cache
record.write({self.name: self.convert_to_write(value)})
record._cache[self] = value
############################################################################
#
# Computation of field values
#
def _compute_value(self, records):
""" Invoke the compute method on ``records``. """
# initialize the fields to their corresponding null value in cache
for field in self.computed_fields:
records._cache[field] = field.null(records.env)
records.env.computed[field].update(records._ids)
self.compute(records)
for field in self.computed_fields:
records.env.computed[field].difference_update(records._ids)
def compute_value(self, records):
""" Invoke the compute method on ``records``; the results are in cache. """
with records.env.do_in_draft():
try:
self._compute_value(records)
except (AccessError, MissingError):
# some record is forbidden or missing, retry record by record
for record in records:
try:
self._compute_value(record)
except Exception as exc:
record._cache[self.name] = FailedValue(exc)
def determine_value(self, record):
""" Determine the value of ``self`` for ``record``. """
env = record.env
if self.column and not (self.depends and env.in_draft):
# this is a stored field or an old-style function field
if self.depends:
# this is a stored computed field, check for recomputation
recs = record._recompute_check(self)
if recs:
# recompute the value (only in cache)
self.compute_value(recs)
# HACK: if result is in the wrong cache, copy values
if recs.env != env:
for source, target in zip(recs, recs.with_env(env)):
try:
values = target._convert_to_cache({
f.name: source[f.name] for f in self.computed_fields
}, validate=False)
except MissingError as e:
values = FailedValue(e)
target._cache.update(values)
# the result is saved to database by BaseModel.recompute()
return
# read the field from database
record._prefetch_field(self)
elif self.compute:
# this is either a non-stored computed field, or a stored computed
# field in draft mode
if self.recursive:
self.compute_value(record)
else:
recs = record._in_cache_without(self)
self.compute_value(recs)
else:
# this is a non-stored non-computed field
record._cache[self] = self.null(env)
def determine_draft_value(self, record):
""" Determine the value of ``self`` for the given draft ``record``. """
if self.compute:
self._compute_value(record)
else:
record._cache[self] = SpecialValue(self.null(record.env))
def determine_inverse(self, records):
""" Given the value of ``self`` on ``records``, inverse the computation. """
if self.inverse:
self.inverse(records)
def determine_domain(self, records, operator, value):
""" Return a domain representing a condition on ``self``. """
if self.search:
return self.search(records, operator, value)
else:
return [(self.name, operator, value)]
############################################################################
#
# Notification when fields are modified
#
def modified(self, records):
""" Notify that field ``self`` has been modified on ``records``: prepare the
fields/records to recompute, and return a spec indicating what to
invalidate.
"""
# invalidate the fields that depend on self, and prepare recomputation
spec = [(self, records._ids)]
for field, path in self._triggers:
if path and field.store:
# don't move this line to function top, see log
env = records.env(user=SUPERUSER_ID, context={'active_test': False})
target = env[field.model_name].search([(path, 'in', records.ids)])
if target:
spec.append((field, target._ids))
# recompute field on target in the environment of records,
# and as user admin if required
if field.compute_sudo:
target = target.with_env(records.env(user=SUPERUSER_ID))
else:
target = target.with_env(records.env)
target._recompute_todo(field)
else:
spec.append((field, None))
return spec
def modified_draft(self, records):
""" Same as :meth:`modified`, but in draft mode. """
env = records.env
# invalidate the fields on the records in cache that depend on
# ``records``, except fields currently being computed
spec = []
for field, path in self._triggers:
target = env[field.model_name]
computed = target.browse(env.computed[field])
if path == 'id':
target = records - computed
elif path:
target = (target.browse(env.cache[field]) - computed).filtered(
lambda rec: rec._mapped_cache(path) & records
)
else:
target = target.browse(env.cache[field]) - computed
if target:
spec.append((field, target._ids))
return spec
class Boolean(Field):
type = 'boolean'
def convert_to_cache(self, value, record, validate=True):
return bool(value)
def convert_to_export(self, value, env):
if env.context.get('export_raw_data'):
return value
return ustr(value)
class Integer(Field):
type = 'integer'
_slots = {
'group_operator': None, # operator for aggregating values
'group_expression': None, # advance expression for aggregating values
}
_related_group_operator = property(attrgetter('group_operator'))
_column_group_operator = property(attrgetter('group_operator'))
_related_group_expression = property(attrgetter('group_expression'))
_column_group_expression = property(attrgetter('group_expression'))
def convert_to_cache(self, value, record, validate=True):
if isinstance(value, dict):
# special case, when an integer field is used as inverse for a one2many
return value.get('id', False)
return int(value or 0)
def convert_to_read(self, value, use_name_get=True):
# Integer values greater than 2^31-1 are not supported in pure XMLRPC,
# so we have to pass them as floats :-(
if value and value > xmlrpclib.MAXINT:
return float(value)
return value
def _update(self, records, value):
# special case, when an integer field is used as inverse for a one2many
records._cache[self] = value.id or 0
def convert_to_export(self, value, env):
if value or value == 0:
return value if env.context.get('export_raw_data') else ustr(value)
return ''
class Float(Field):
""" The precision digits are given by the attribute
:param digits: a pair (total, decimal), or a function taking a database
cursor and returning a pair (total, decimal)
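    For instance (an illustrative sketch)::
        amount = fields.Float(digits=(16, 2))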
"""
type = 'float'
_slots = {
'_digits': None, # digits argument passed to class initializer
'group_operator': None, # operator for aggregating values
'group_expression': None, # advance expression for aggregating values
}
def __init__(self, string=None, digits=None, **kwargs):
super(Float, self).__init__(string=string, _digits=digits, **kwargs)
@property
def digits(self):
if callable(self._digits):
with fields._get_cursor() as cr:
return self._digits(cr)
else:
return self._digits
def _setup_digits(self, env):
""" Setup the digits for ``self`` and its corresponding column """
pass
def _setup_regular(self, env):
super(Float, self)._setup_regular(env)
self._setup_digits(env)
_related__digits = property(attrgetter('_digits'))
_related_group_operator = property(attrgetter('group_operator'))
_related_group_expression = property(attrgetter('group_expression'))
_description_digits = property(attrgetter('digits'))
_column_digits = property(lambda self: not callable(self._digits) and self._digits)
_column_digits_compute = property(lambda self: callable(self._digits) and self._digits)
_column_group_operator = property(attrgetter('group_operator'))
_column_group_expression = property(attrgetter('group_expression'))
def convert_to_cache(self, value, record, validate=True):
# apply rounding here, otherwise value in cache may be wrong!
value = float(value or 0.0)
digits = self.digits
return float_round(value, precision_digits=digits[1]) if digits else value
def convert_to_export(self, value, env):
if value or value == 0.0:
return value if env.context.get('export_raw_data') else ustr(value)
return ''
class _String(Field):
""" Abstract class for string fields. """
_slots = {
'translate': False, # whether the field is translated
}
_column_translate = property(attrgetter('translate'))
_related_translate = property(attrgetter('translate'))
_description_translate = property(attrgetter('translate'))
class Char(_String):
""" Basic string field, can be length-limited, usually displayed as a
single-line string in clients
:param int size: the maximum size of values stored for that field
:param bool translate: whether the values of this field can be translated
"""
type = 'char'
_slots = {
'size': None, # maximum size of values (deprecated)
}
_column_size = property(attrgetter('size'))
_related_size = property(attrgetter('size'))
_description_size = property(attrgetter('size'))
def _setup_regular(self, env):
super(Char, self)._setup_regular(env)
assert isinstance(self.size, (NoneType, int)), \
"Char field %s with non-integer size %r" % (self, self.size)
def convert_to_cache(self, value, record, validate=True):
if value is None or value is False:
return False
return ustr(value)[:self.size]
class Text(_String):
""" Very similar to :class:`~.Char` but used for longer contents, does not
have a size and usually displayed as a multiline text box.
:param translate: whether the value of this field can be translated
"""
type = 'text'
def convert_to_cache(self, value, record, validate=True):
if value is None or value is False:
return False
return ustr(value)
class Html(_String):
type = 'html'
_slots = {
'sanitize': True, # whether value must be sanitized
'strip_style': False, # whether to strip style attributes
}
_column_sanitize = property(attrgetter('sanitize'))
_related_sanitize = property(attrgetter('sanitize'))
_description_sanitize = property(attrgetter('sanitize'))
_column_strip_style = property(attrgetter('strip_style'))
_related_strip_style = property(attrgetter('strip_style'))
_description_strip_style = property(attrgetter('strip_style'))
def convert_to_cache(self, value, record, validate=True):
if value is None or value is False:
return False
if validate and self.sanitize:
return html_sanitize(value, strip_style=self.strip_style)
return value
class Date(Field):
type = 'date'
@staticmethod
def today(*args):
""" Return the current day in the format expected by the ORM.
This function may be used to compute default values.
"""
return date.today().strftime(DATE_FORMAT)
@staticmethod
def context_today(record, timestamp=None):
""" Return the current date as seen in the client's timezone in a format
fit for date fields. This method may be used to compute default
values.
:param datetime timestamp: optional datetime value to use instead of
the current date and time (must be a datetime, regular dates
can't be converted between timezones.)
:rtype: str
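        For instance (an illustrative sketch of a typical default)::
            birthday = fields.Date(default=lambda self: fields.Date.context_today(self))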
"""
today = timestamp or datetime.now()
context_today = None
tz_name = record._context.get('tz') or record.env.user.tz
if tz_name:
try:
today_utc = pytz.timezone('UTC').localize(today, is_dst=False) # UTC = no DST
context_today = today_utc.astimezone(pytz.timezone(tz_name))
except Exception:
_logger.debug("failed to compute context/client-specific today date, using UTC value for `today`",
exc_info=True)
return (context_today or today).strftime(DATE_FORMAT)
@staticmethod
def from_string(value):
""" Convert an ORM ``value`` into a :class:`date` value. """
if not value:
return None
value = value[:DATE_LENGTH]
return datetime.strptime(value, DATE_FORMAT).date()
@staticmethod
def to_string(value):
""" Convert a :class:`date` value into the format expected by the ORM. """
return value.strftime(DATE_FORMAT) if value else False
def convert_to_cache(self, value, record, validate=True):
if not value:
return False
if isinstance(value, basestring):
if validate:
# force parsing for validation
self.from_string(value)
return value[:DATE_LENGTH]
return self.to_string(value)
def convert_to_export(self, value, env):
if not value:
return ''
return self.from_string(value) if env.context.get('export_raw_data') else ustr(value)
class Datetime(Field):
type = 'datetime'
@staticmethod
def now(*args):
""" Return the current day and time in the format expected by the ORM.
This function may be used to compute default values.
"""
return datetime.now().strftime(DATETIME_FORMAT)
@staticmethod
def context_timestamp(record, timestamp):
"""Returns the given timestamp converted to the client's timezone.
This method is *not* meant for use as a _defaults initializer,
because datetime fields are automatically converted upon
        display on the client side. For _defaults, :meth:`fields.datetime.now`
        should be used instead.
:param datetime timestamp: naive datetime value (expressed in UTC)
to be converted to the client timezone
:rtype: datetime
:return: timestamp converted to timezone-aware datetime in context
timezone
"""
assert isinstance(timestamp, datetime), 'Datetime instance expected'
tz_name = record._context.get('tz') or record.env.user.tz
utc_timestamp = pytz.utc.localize(timestamp, is_dst=False) # UTC = no DST
if tz_name:
try:
context_tz = pytz.timezone(tz_name)
return utc_timestamp.astimezone(context_tz)
except Exception:
_logger.debug("failed to compute context/client-specific timestamp, "
"using the UTC value",
exc_info=True)
return utc_timestamp
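    # Sketch, assuming the user's timezone is 'US/Pacific' (UTC-7 in June):
    # a naive UTC datetime comes back as an aware datetime in that timezone.
    #
    #     naive = datetime(2015, 6, 15, 12, 0, 0)    # value as stored (UTC)
    #     Datetime.context_timestamp(record, naive)
    #     # -> 2015-06-15 05:00:00-07:00 (tzinfo set to US/Pacific)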
@staticmethod
def from_string(value):
""" Convert an ORM ``value`` into a :class:`datetime` value. """
if not value:
return None
value = value[:DATETIME_LENGTH]
if len(value) == DATE_LENGTH:
value += " 00:00:00"
return datetime.strptime(value, DATETIME_FORMAT)
@staticmethod
def to_string(value):
""" Convert a :class:`datetime` value into the format expected by the ORM. """
return value.strftime(DATETIME_FORMAT) if value else False
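    # ``from_string`` pads date-only values to midnight; sketch with
    # illustrative values:
    #
    #     Datetime.from_string('2015-01-31')           # datetime(2015, 1, 31, 0, 0)
    #     Datetime.from_string('2015-01-31 14:30:00')  # datetime(2015, 1, 31, 14, 30)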
def convert_to_cache(self, value, record, validate=True):
if not value:
return False
if isinstance(value, basestring):
if validate:
# force parsing for validation
self.from_string(value)
value = value[:DATETIME_LENGTH]
if len(value) == DATE_LENGTH:
value += " 00:00:00"
return value
return self.to_string(value)
def convert_to_export(self, value, env):
if not value:
return ''
return self.from_string(value) if env.context.get('export_raw_data') else ustr(value)
def convert_to_display_name(self, value, record=None):
assert record, 'Record expected'
return Datetime.to_string(Datetime.context_timestamp(record, Datetime.from_string(value)))
class Binary(Field):
type = 'binary'
class Selection(Field):
"""
:param selection: specifies the possible values for this field.
It is given as either a list of pairs (``value``, ``string``), or a
model method, or a method name.
:param selection_add: provides an extension of the selection in the case
of an overridden field. It is a list of pairs (``value``, ``string``).
The attribute ``selection`` is mandatory except in the case of
:ref:`related fields <field-related>` or :ref:`field extensions
<field-incremental-definition>`.
"""
type = 'selection'
_slots = {
'selection': None, # [(value, string), ...], function or method name
}
def __init__(self, selection=None, string=None, **kwargs):
if callable(selection):
from openerp import api
selection = api.expected(api.model, selection)
super(Selection, self).__init__(selection=selection, string=string, **kwargs)
def _setup_regular(self, env):
super(Selection, self)._setup_regular(env)
assert self.selection is not None, "Field %s without selection" % self
def _setup_related(self, env):
super(Selection, self)._setup_related(env)
# selection must be computed on related field
field = self.related_field
self.selection = lambda model: field._description_selection(model.env)
def set_class_name(self, cls, name):
super(Selection, self).set_class_name(cls, name)
# determine selection (applying 'selection_add' extensions)
for field in resolve_all_mro(cls, name, reverse=True):
if isinstance(field, type(self)):
# We cannot use field.selection or field.selection_add here
# because those attributes are overridden by ``set_class_name``.
if 'selection' in field._attrs:
self.selection = field._attrs['selection']
if 'selection_add' in field._attrs:
# use an OrderedDict to update existing values
selection_add = field._attrs['selection_add']
self.selection = OrderedDict(self.selection + selection_add).items()
else:
self.selection = None
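    # Extension sketch (hypothetical): a class overriding the ``state`` field
    # above can extend it without repeating the existing pairs; the
    # OrderedDict merge keeps the original order and lets an extension
    # relabel an existing value by reusing its key.
    #
    #     state = fields.Selection(selection_add=[('cancelled', 'Cancelled')])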
def _description_selection(self, env):
""" return the selection list (pairs (value, label)); labels are
translated according to context language
"""
selection = self.selection
if isinstance(selection, basestring):
return getattr(env[self.model_name], selection)()
if callable(selection):
return selection(env[self.model_name])
# translate selection labels
if env.lang:
name = "%s,%s" % (self.model_name, self.name)
translate = partial(
env['ir.translation']._get_source, name, 'selection', env.lang)
return [(value, translate(label) if label else label) for value, label in selection]
else:
return selection
@property
def _column_selection(self):
if isinstance(self.selection, basestring):
method = self.selection
return lambda self, *a, **kw: getattr(self, method)(*a, **kw)
else:
return self.selection
def get_values(self, env):
""" return a list of the possible values """
selection = self.selection
if isinstance(selection, basestring):
selection = getattr(env[self.model_name], selection)()
elif callable(selection):
selection = selection(env[self.model_name])
return [value for value, _ in selection]
def convert_to_cache(self, value, record, validate=True):
if not validate:
return value or False
if value in self.get_values(record.env):
return value
elif not value:
return False
raise ValueError("Wrong value for %s: %r" % (self, value))
def convert_to_export(self, value, env):
if not isinstance(self.selection, list):
# FIXME: this reproduces an existing buggy behavior!
return value if value else ''
for item in self._description_selection(env):
if item[0] == value:
return item[1]
return False
class Reference(Selection):
type = 'reference'
_slots = {
'size': None, # maximum size of values (deprecated)
}
_related_size = property(attrgetter('size'))
_column_size = property(attrgetter('size'))
def _setup_regular(self, env):
super(Reference, self)._setup_regular(env)
assert isinstance(self.size, (NoneType, int)), \
"Reference field %s with non-integer size %r" % (self, self.size)
def convert_to_cache(self, value, record, validate=True):
if isinstance(value, BaseModel):
if ((not validate or value._name in self.get_values(record.env))
and len(value) <= 1):
return value.with_env(record.env) or False
elif isinstance(value, basestring):
res_model, res_id = value.split(',')
return record.env[res_model].browse(int(res_id))
elif not value:
return False
raise ValueError("Wrong value for %s: %r" % (self, value))
def convert_to_read(self, value, use_name_get=True):
return "%s,%s" % (value._name, value.id) if value else False
def convert_to_export(self, value, env):
return value.name_get()[0][1] if value else ''
def convert_to_display_name(self, value, record=None):
return ustr(value and value.display_name)
class _Relational(Field):
""" Abstract class for relational fields. """
relational = True
_slots = {
'domain': [], # domain for searching values
'context': {}, # context for searching values
}
def _setup_regular(self, env):
super(_Relational, self)._setup_regular(env)
if self.comodel_name not in env.registry:
_logger.warning("Field %s with unknown comodel_name %r"
% (self, self.comodel_name))
self.comodel_name = '_unknown'
@property
def _related_domain(self):
if callable(self.domain):
# will be called with another model than self's
return lambda recs: self.domain(recs.env[self.model_name])
else:
# maybe not correct if domain is a string...
return self.domain
_related_context = property(attrgetter('context'))
_description_relation = property(attrgetter('comodel_name'))
_description_context = property(attrgetter('context'))
def _description_domain(self, env):
return self.domain(env[self.model_name]) if callable(self.domain) else self.domain
_column_obj = property(attrgetter('comodel_name'))
_column_domain = property(attrgetter('domain'))
_column_context = property(attrgetter('context'))
def null(self, env):
return env[self.comodel_name]
def modified(self, records):
# Invalidate cache for self.inverse_fields, too. Note that recomputation
# of fields that depend on self.inverse_fields is already covered by the
# triggers (see above).
spec = super(_Relational, self).modified(records)
for invf in self.inverse_fields:
spec.append((invf, None))
return spec
class Many2one(_Relational):
""" The value of such a field is a recordset of size 0 (no
record) or 1 (a single record).
:param comodel_name: name of the target model (string)
:param domain: an optional domain to set on candidate values on the
client side (domain or string)
:param context: an optional context to use on the client side when
handling that field (dictionary)
:param ondelete: what to do when the referred record is deleted;
possible values are: ``'set null'``, ``'restrict'``, ``'cascade'``
:param auto_join: whether JOINs are generated upon search through that
field (boolean, by default ``False``)
:param delegate: set it to ``True`` to make fields of the target model
accessible from the current model (corresponds to ``_inherits``)
The attribute ``comodel_name`` is mandatory except in the case of related
fields or field extensions.
"""
type = 'many2one'
_slots = {
'ondelete': 'set null', # what to do when value is deleted
'auto_join': False, # whether joins are generated upon search
'delegate': False, # whether self implements delegation
}
def __init__(self, comodel_name=None, string=None, **kwargs):
super(Many2one, self).__init__(comodel_name=comodel_name, string=string, **kwargs)
def set_class_name(self, cls, name):
super(Many2one, self).set_class_name(cls, name)
# determine self.delegate
if not self.delegate:
self.delegate = name in cls._inherits.values()
_column_ondelete = property(attrgetter('ondelete'))
_column_auto_join = property(attrgetter('auto_join'))
def _update(self, records, value):
""" Update the cached value of ``self`` for ``records`` with ``value``. """
records._cache[self] = value
def convert_to_cache(self, value, record, validate=True):
if isinstance(value, (NoneType, int, long)):
return record.env[self.comodel_name].browse(value)
if isinstance(value, BaseModel):
if value._name == self.comodel_name and len(value) <= 1:
return value.with_env(record.env)
raise ValueError("Wrong value for %s: %r" % (self, value))
elif isinstance(value, tuple):
return record.env[self.comodel_name].browse(value[0])
elif isinstance(value, dict):
return record.env[self.comodel_name].new(value)
else:
return self.null(record.env)
def convert_to_read(self, value, use_name_get=True):
if use_name_get and value:
# evaluate name_get() as superuser, because the visibility of a
# many2one field value (id and name) depends on the current record's
# access rights, and not the value's access rights.
try:
value_sudo = value.sudo()
# performance trick: make sure that all records of the same
# model as value in value.env will be prefetched in value_sudo.env
value_sudo.env.prefetch[value._name].update(value.env.prefetch[value._name])
return value_sudo.name_get()[0]
except MissingError:
# Should not happen, unless the foreign key is missing.
return False
else:
return value.id
def convert_to_write(self, value, target=None, fnames=None):
return value.id
def convert_to_onchange(self, value):
return value.id
def convert_to_export(self, value, env):
return value.name_get()[0][1] if value else ''
def convert_to_display_name(self, value, record=None):
return ustr(value.display_name)
class UnionUpdate(SpecialValue):
""" Placeholder for a value update; when this value is taken from the cache,
it returns ``record[field.name] | value`` and stores it in the cache.
"""
def __init__(self, field, record, value):
self.args = (field, record, value)
def get(self):
field, record, value = self.args
# in order to read the current field's value, remove self from cache
del record._cache[field]
# read the current field's value, and update it in cache only
record._cache[field] = new_value = record[field.name] | value
return new_value
class _RelationalMulti(_Relational):
""" Abstract class for relational fields *2many. """
def _update(self, records, value):
""" Update the cached value of ``self`` for ``records`` with ``value``. """
for record in records:
if self in record._cache:
record._cache[self] = record[self.name] | value
else:
record._cache[self] = UnionUpdate(self, record, value)
def convert_to_cache(self, value, record, validate=True):
if isinstance(value, BaseModel):
if value._name == self.comodel_name:
return value.with_env(record.env)
elif isinstance(value, list):
# value is a list of record ids or commands
comodel = record.env[self.comodel_name]
ids = OrderedSet(record[self.name].ids)
# modify ids with the commands
for command in value:
if isinstance(command, (tuple, list)):
if command[0] == 0:
ids.add(comodel.new(command[2]).id)
elif command[0] == 1:
comodel.browse(command[1]).update(command[2])
ids.add(command[1])
elif command[0] == 2:
# note: the record will be deleted by write()
ids.discard(command[1])
elif command[0] == 3:
ids.discard(command[1])
elif command[0] == 4:
ids.add(command[1])
elif command[0] == 5:
ids.clear()
elif command[0] == 6:
ids = OrderedSet(command[2])
elif isinstance(command, dict):
ids.add(comodel.new(command).id)
else:
ids.add(command)
# return result as a recordset
return comodel.browse(list(ids))
elif not value:
return self.null(record.env)
raise ValueError("Wrong value for %s: %s" % (self, value))
def convert_to_read(self, value, use_name_get=True):
return value.ids
def convert_to_write(self, value, target=None, fnames=None):
# remove/delete former records
if target is None:
set_ids = []
result = [(6, 0, set_ids)]
add_existing = lambda id: set_ids.append(id)
else:
tag = 2 if self.type == 'one2many' else 3
result = [(tag, record.id) for record in target[self.name] - value]
add_existing = lambda id: result.append((4, id))
if fnames is None:
# take all fields in cache, except the inverses of self
fnames = set(value._fields) - set(MAGIC_COLUMNS)
for invf in self.inverse_fields:
fnames.discard(invf.name)
# add new and existing records
for record in value:
if not record.id:
values = {k: v for k, v in record._cache.iteritems() if k in fnames}
values = record._convert_to_write(values)
result.append((0, 0, values))
elif record._is_dirty():
values = {k: record._cache[k] for k in record._get_dirty() if k in fnames}
values = record._convert_to_write(values)
result.append((1, record.id, values))
else:
add_existing(record.id)
return result
def convert_to_export(self, value, env):
return ','.join(name for id, name in value.name_get()) if value else ''
def convert_to_display_name(self, value, record=None):
raise NotImplementedError()
def _compute_related(self, records):
""" Compute the related field ``self`` on ``records``. """
for record in records:
value = record
# traverse the intermediate fields, and keep at most one record
for name in self.related[:-1]:
value = value[name][:1]
record[self.name] = value[self.related[-1]]
class One2many(_RelationalMulti):
""" One2many field; the value of such a field is the recordset of all the
records in ``comodel_name`` such that the field ``inverse_name`` is equal to
the current record.
:param comodel_name: name of the target model (string)
:param inverse_name: name of the inverse ``Many2one`` field in
``comodel_name`` (string)
:param domain: an optional domain to set on candidate values on the
client side (domain or string)
:param context: an optional context to use on the client side when
handling that field (dictionary)
:param auto_join: whether JOINs are generated upon search through that
field (boolean, by default ``False``)
:param limit: optional limit to use upon read (integer)
The attributes ``comodel_name`` and ``inverse_name`` are mandatory except in
the case of related fields or field extensions.
"""
type = 'one2many'
_slots = {
'inverse_name': None, # name of the inverse field
'auto_join': False, # whether joins are generated upon search
'limit': None, # optional limit to use upon read
'copy': False, # o2m are not copied by default
}
def __init__(self, comodel_name=None, inverse_name=None, string=None, **kwargs):
super(One2many, self).__init__(
comodel_name=comodel_name,
inverse_name=inverse_name,
string=string,
**kwargs
)
def _setup_regular(self, env):
super(One2many, self)._setup_regular(env)
if self.inverse_name:
# link self to its inverse field and vice-versa
comodel = env[self.comodel_name]
invf = comodel._fields[self.inverse_name]
            # In some rare cases, a ``One2many`` field can link to an ``Integer``
            # field (res_model/res_id pattern). Only set up the inverse link if
            # the target is actually a ``Many2one`` field.
if isinstance(invf, Many2one):
self.inverse_fields += (invf,)
invf.inverse_fields += (self,)
_description_relation_field = property(attrgetter('inverse_name'))
_column_fields_id = property(attrgetter('inverse_name'))
_column_auto_join = property(attrgetter('auto_join'))
_column_limit = property(attrgetter('limit'))
class Many2many(_RelationalMulti):
""" Many2many field; the value of such a field is the recordset.
:param comodel_name: name of the target model (string)
The attribute ``comodel_name`` is mandatory except in the case of related
fields or field extensions.
:param relation: optional name of the table that stores the relation in
the database (string)
:param column1: optional name of the column referring to "these" records
in the table ``relation`` (string)
:param column2: optional name of the column referring to "those" records
in the table ``relation`` (string)
The attributes ``relation``, ``column1`` and ``column2`` are optional. If not
given, names are automatically generated from model names, provided
``model_name`` and ``comodel_name`` are different!
:param domain: an optional domain to set on candidate values on the
client side (domain or string)
:param context: an optional context to use on the client side when
handling that field (dictionary)
:param limit: optional limit to use upon read (integer)
"""
type = 'many2many'
_slots = {
'relation': None, # name of table
'column1': None, # column of table referring to model
'column2': None, # column of table referring to comodel
'limit': None, # optional limit to use upon read
}
def __init__(self, comodel_name=None, relation=None, column1=None, column2=None,
string=None, **kwargs):
super(Many2many, self).__init__(
comodel_name=comodel_name,
relation=relation,
column1=column1,
column2=column2,
string=string,
**kwargs
)
def _setup_regular(self, env):
super(Many2many, self)._setup_regular(env)
if not self.relation and self.store:
# retrieve self.relation from the corresponding column
column = self.to_column()
if isinstance(column, fields.many2many):
self.relation, self.column1, self.column2 = \
column._sql_names(env[self.model_name])
if self.relation:
m2m = env.registry._m2m
# if inverse field has already been setup, it is present in m2m
invf = m2m.get((self.relation, self.column2, self.column1))
if invf:
self.inverse_fields += (invf,)
invf.inverse_fields += (self,)
else:
# add self in m2m, so that its inverse field can find it
m2m[(self.relation, self.column1, self.column2)] = self
_column_rel = property(attrgetter('relation'))
_column_id1 = property(attrgetter('column1'))
_column_id2 = property(attrgetter('column2'))
_column_limit = property(attrgetter('limit'))
class Serialized(Field):
""" Minimal support for existing sparse and serialized fields. """
type = 'serialized'
def convert_to_cache(self, value, record, validate=True):
return value or {}
class Id(Field):
""" Special case for field 'id'. """
type = 'integer'
_slots = {
'string': 'ID',
'store': True,
'readonly': True,
}
def to_column(self):
self.column = fields.integer(self.string)
return self.column
def __get__(self, record, owner):
if record is None:
return self # the field is accessed through the class owner
if not record:
return False
return record.ensure_one()._ids[0]
def __set__(self, record, value):
raise TypeError("field 'id' cannot be assigned")
# imported here to avoid dependency cycle issues
from openerp import SUPERUSER_ID, registry
from .exceptions import Warning, AccessError, MissingError
from .models import BaseModel, MAGIC_COLUMNS
from .osv import fields
| Antiun/odoo | openerp/fields.py | Python | agpl-3.0 | 75,603 |