# Code-corpus dataset dump: each record below is a single source file, introduced by a header line giving its repo, path, and license.
# ==== cjbrasher/LipidFinder :: LipidFinder/Configuration/LFParametersGUI.py (license: mit) ====
# Copyright (c) 2019 J. Alvarez-Jarreta and C.J. Brasher
#
# This file is part of the LipidFinder software tool and governed by the
# 'MIT License'. Please see the LICENSE file that should have been
# included as part of this software.
"""Graphical User Interface (GUI) to manage the parameters' collection.
"""
from collections import OrderedDict
import os
from IPython.display import display
from ipywidgets import widgets, Layout
import pandas
from LipidFinder.Configuration import LFParameters
from LipidFinder._utils import normalise_path
class _TaggedToggleButton(widgets.ToggleButton):
"""Add "tag" attribute to widgets.ToggleButton class."""
def __init__(self, tag, **kwargs):
widgets.ToggleButton.__init__(self, **kwargs)
self.tag = tag
class _TaggedCheckbox(widgets.Checkbox):
"""Add "tag" attribute to widgets.Checkbox class."""
def __init__(self, tag, **kwargs):
widgets.Checkbox.__init__(self, **kwargs)
self.tag = tag
class _TaggedButton(widgets.Button):
"""Add "tag" attribute to widgets.Button class."""
def __init__(self, tag, **kwargs):
widgets.Button.__init__(self, **kwargs)
self.tag = tag
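# Note on the _Tagged* helpers above: ipywidgets uses the "description" trait
# for the text shown next to a control (e.g. the bool ToggleButton displays
# "Yes"/"No" and each multiselection Checkbox displays an option name), so the
# extra "tag" attribute is what lets a change handler recover which parameter
# a widget belongs to, for instance:
#
#     def _bool_handler(self, change):
#         key = change['owner'].tag   # parameter name, not the button label
#         self._parameters[key]['value'] = change['new']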
class LFParametersGUI(LFParameters):
"""A LFParametersGUI object stores a set of LipidFinder parameters
to be used in the specified module.
This subclass of LFParameters implements a graphical interface using
jupyter notebook's widgets, executed during the object creation. It
allows the user to check, change and save each active parameter's
value interactively.
Attributes:
_parameters (Private[collections.OrderedDict])
Dictionary where the parameters and their associated
information are stored.
_floatPointPrecision (Private[int])
Number of digits after the radix point in floats.
_floatStep (Private[float])
Minimum difference between two consecutive float numbers.
_style (Private[dict])
Dictionary with the default style settings for widgets.
_inputWidth (Private[str])
String representation of the default width of input widgets.
_widgets (Private[collections.OrderedDict])
Dictionary where the widgets for each parameter are stored.
Examples:
LFParametersGUI objects can be created as follows:
        >>> from Configuration.LFParametersGUI import LFParametersGUI
>>> LFParametersGUI()
>>> LFParametersGUI(src='/home/user/my_parameters.json')
The former will load the default PeakFilter parameters and will
load and display the interface afterwards. The latter will load
the default PeakFilter parameters, override them with the values
found in the JSON file provided, and finally it will load and
display the interface.
Alternatively, a specific module can be introduced as argument:
        >>> from Configuration.LFParametersGUI import LFParametersGUI
>>> LFParametersGUI(module='mssearch')
"""
def __init__(self, precision=4, **kwargs):
# type: (int, ...) -> LFParametersGUI
"""Constructor of the class LFParametersGUI.
First, the module's parameters template file is loaded. Next, if
a source JSON parameters file path is provided, the default
values are overwritten by the corresponding new (valid) values.
Finally, the graphical user interface is displayed.
Keyword Arguments:
precision -- number of decimal digits to use with floats
(e.g. a precision of 2 forces a difference of
0.01 between any two consecutive float numbers)
[default: 4]
"""
# Minimum difference between two consecutive float numbers
self._floatPointPrecision = precision
self._floatStep = 10 ** -(precision)
# Load the parameters dictionary using parent class' constructor
LFParameters.__init__(self, **kwargs)
# Default style
self._style = {'description_width': '0px'}
# Default width of input widgets
self._inputWidth = '26%'
# Generate an ordered dict to store each parameter's set of
# widgets in the same order as in the parameters' dict
self._widgets = OrderedDict()
# Create every widget of the GUI
for key, data in self._parameters.items():
disabled = not self._is_active(key)
# Load the information of each parameter
self._widgets[key] = [self._create_label(key, disabled),
self._create_help_icon(key, disabled)]
# Create the input widget or container of input widgets for
# each parameter type
if (data['type'] == 'bool'):
self._widgets[key].append(
self._create_bool_widget(key, disabled))
elif (data['type'] == 'int'):
self._widgets[key].append(
self._create_int_widget(key, disabled))
elif (data['type'] == 'float'):
self._widgets[key].append(
self._create_float_widget(key, disabled))
elif (data['type'] == 'selection'):
self._widgets[key].append(
self._create_selection_widget(key, disabled))
elif (data['type'] == 'path'):
self._widgets[key].append(
self._create_path_widget(key, disabled))
elif (data['type'] == 'int range'):
self._widgets[key].append(
self._create_int_range_widget(key, disabled))
elif (data['type'] == 'float range'):
self._widgets[key].append(
self._create_float_range_widget(key, disabled))
elif (data['type'] == 'multiselection'):
self._widgets[key].append(
self._create_multiselection_widget(key, disabled))
elif (data['type'] == 'pairs'):
self._widgets[key].append(
self._create_pairs_widget(key, disabled))
else: # data['type'] == 'str'
self._widgets[key].append(
self._create_str_widget(key, disabled))
# Display the GUI
hboxLayout = Layout(align_items='center')
for key, widgetList in self._widgets.items():
display(widgets.HBox(widgetList, layout=hboxLayout))
# Finally, create the save interface to allow the user to save
# the current parameters values in a JSON file
display(widgets.HBox([], layout=Layout(height='15px')))
display(widgets.HBox([], layout=Layout(height='0px',
border='2px solid lightgray')))
display(widgets.HBox([], layout=Layout(height='2px')))
self._widgets['save'] = self._create_save_widget()
hboxLayout = Layout(justify_content='space-between',
align_items='center')
display(widgets.HBox(self._widgets['save'], layout=hboxLayout))
def _create_label(self, key, disabled):
# type: (str, bool) -> widgets.HTML
"""Return an HTML widget with the parameter's description.
If 'disabled' is False, the text will be in black, otherwise it
will be in gray.
Keyword Arguments:
key -- name of the parameter
disabled -- is the parameter/widget disabled?
"""
text = self._parameters[key]['description']
label = ("<p style=\"font-size:110%; line-height:19px; color:{0};\">{1}"
"</p>").format('Gray' if disabled else 'Black', text)
return widgets.HTML(value=label, style=self._style,
layout=Layout(width='50%'))
def _create_help_icon(self, key, disabled):
# type: (str, bool) -> widgets.HTML
"""Return an HTML widget with the parameter's help as tooltip of
a help icon.
Keyword Arguments:
key -- name of the parameter
disabled -- is the parameter/widget disabled?
"""
if ('help' in self._parameters[key]):
code = ("<link rel=\"stylesheet\" href=\"https://fonts.googleapis.c"
"om/icon?family=Material+Icons\"><i class=\"material-icons"
"\" style=\"color:{0}; font-size:18px; display:inline"
"-flex; vertical-align:middle;\" title=\"{1}\">help</i>"
"").format("SteelBlue", self._parameters[key]['help'])
else:
code = ''
layout = Layout(width='2%',
visibility='hidden' if disabled else 'visible')
return widgets.HTML(value=code, style=self._style, layout=layout)
def _create_str_widget(self, key, disabled):
# type: (str, bool) -> widgets.Text
"""Return a Text widget with the parameter's value.
Keyword Arguments:
key -- name of the parameter
disabled -- is the parameter/widget disabled?
"""
if ('example' in self._parameters[key]):
example = self._parameters[key]['example']
else:
example = ''
inputWidget = widgets.Text(
value=self[key], description=key, placeholder=example,
style=self._style, layout=Layout(width=self._inputWidth),
continuous_update=False, disabled=disabled)
# Add handler for when the "value" trait changes
inputWidget.observe(self._default_handler, names='value')
return inputWidget
def _create_bool_widget(self, key, disabled):
# type: (str, bool) -> widgets.HBox
"""Return an HBox containing a ToggleButton widget to represent
the parameter's value.
Keyword Arguments:
key -- name of the parameter
disabled -- is the parameter/widget disabled?
"""
inputWidget = _TaggedToggleButton(
value=self[key], description='Yes' if self[key] else 'No',
tag=key, style=self._style, layout=Layout(width='50%'),
button_style='primary', disabled=disabled)
# Add handler for when the "value" trait changes
inputWidget.observe(self._bool_handler, names='value')
layout = Layout(width=self._inputWidth, justify_content='center')
return widgets.HBox([inputWidget], layout=layout)
def _create_int_widget(self, key, disabled):
# type: (str, bool) -> widgets.BoundedIntText
"""Return a BoundedIntText widget with the parameter's value.
Keyword Arguments:
key -- name of the parameter
disabled -- is the parameter/widget disabled?
"""
inputWidget = widgets.BoundedIntText(
value=self[key], description=key, min=self._min(key),
max=self._max(key), style=self._style,
layout=Layout(width=self._inputWidth), continuous_update=False,
disabled=disabled)
# Save the widget's value in case its constructor automatically
# replaces an empty one given as argument
self._parameters[key]['value'] = inputWidget.value
# Add handler for when the "value" trait changes
inputWidget.observe(self._default_handler, names='value')
return inputWidget
def _create_float_widget(self, key, disabled):
# type: (str, bool) -> widgets.BoundedFloatText
"""Return a BoundedFloatText widget with the parameter's value.
Keyword Arguments:
key -- name of the parameter
disabled -- is the parameter/widget disabled?
"""
inputWidget = widgets.BoundedFloatText(
value=self[key], description=key, min=self._min(key),
max=self._max(key), step=self._floatStep, style=self._style,
layout=Layout(width=self._inputWidth), continuous_update=False,
disabled=disabled)
# Save the widget's value in case its constructor automatically
# replaces an empty one given as argument
self._parameters[key]['value'] = inputWidget.value
# Add handler for when the "value" trait changes
inputWidget.observe(self._default_handler, names='value')
return inputWidget
def _create_selection_widget(self, key, disabled):
# type: (str, bool) -> widgets.Dropdown
"""Return a Dropdown widget with the parameter's options and its
current value selected.
Keyword Arguments:
key -- name of the parameter
disabled -- is the parameter/widget disabled?
"""
inputWidget = widgets.Dropdown(
options=self._parameters[key]['options'], value=self[key],
description=key, style=self._style,
layout=Layout(width=self._inputWidth), disabled=disabled)
# Add handler for when the "value" trait changes
inputWidget.observe(self._default_handler, names='value')
return inputWidget
def _create_path_widget(self, key, disabled):
# type: (str, bool) -> widgets.HBox
"""Return an HBox containing a Text widget with the parameter's
value.
If the Text widget is enabled and the file does not exist, a
warning icon will be displayed next to it to alert the user.
Keyword Arguments:
key -- name of the parameter
disabled -- is the parameter/widget disabled?
"""
inputWidget = widgets.Text(
value=self[key], description=key, style=self._style,
layout=Layout(width='92%'), continuous_update=False,
disabled=disabled)
# Add handler for when the "value" trait changes
inputWidget.observe(self._path_handler, names='value')
# Create an HTML widget with a warning icon that will be
# displayed if the Text widget is enabled and the file does not
# exist
code = ("<link rel=\"stylesheet\" href=\"https://fonts.googleapis.com/i"
"con?family=Material+Icons\"><i class=\"material-icons\" style="
"\"font-size:18px; color:Red; display:inline-flex; vertical-ali"
"gn:middle;\" title=\"File not found!\">warning</i>")
warn = not disabled and not os.path.isfile(self[key])
layout = Layout(width='5%',
visibility='visible' if warn else 'hidden')
warnWidget = widgets.HTML(value=code, style=self._style, layout=layout)
layout = Layout(width='46%', justify_content='space-between')
return widgets.HBox([inputWidget, warnWidget], layout=layout)
def _create_int_range_widget(self, key, disabled):
# type: (str, bool) -> widgets.HBox
"""Return an HBox containing two BoundedIntText widgets with the
parameter's range values.
The widgets are created to fulfill the "int range" type
condition: lower_bound < upper_bound
Keyword Arguments:
key -- name of the parameter
disabled -- is the parameter/widget disabled?
"""
lowerBound = widgets.BoundedIntText(
value=self[key][0], description=key, min=self._min(key),
max=self[key][1] - 1, style=self._style,
layout=Layout(width='50%'), continuous_update=False,
disabled=disabled)
# Save the widget's value in case its constructor automatically
# replaces an empty one given as argument
self._parameters[key]['value'][0] = lowerBound.value
# Add handler for when the "value" trait changes
lowerBound.observe(self._range_handler, names='value')
upperBound = widgets.BoundedIntText(
value=self[key][1], description=key, min=self[key][0] + 1,
max=self._max(key), style=self._style,
layout=Layout(width='50%'), continuous_update=False,
disabled=disabled)
# Save the widget's value in case its constructor automatically
# replaces an empty one given as argument
self._parameters[key]['value'][1] = upperBound.value
# Add handler for when the "value" trait changes
upperBound.observe(self._range_handler, names='value')
return widgets.HBox([lowerBound, upperBound],
layout=Layout(width=self._inputWidth))
def _create_float_range_widget(self, key, disabled):
# type: (str, bool) -> widgets.HBox
"""Return an HBox containing two BoundedFloatText widgets with
the parameter's range values.
The widgets are created to fulfill the "float range" type
condition: lower_bound < upper_bound
Keyword Arguments:
key -- name of the parameter
disabled -- is the parameter/widget disabled?
"""
lowerBound = widgets.BoundedFloatText(
value=self[key][0], description=key, min=self._min(key),
max=self[key][1] - self._floatStep, step=self._floatStep,
style=self._style, layout=Layout(width='50%'),
continuous_update=False, disabled=disabled)
# Save the widget's value in case its constructor automatically
# replaces an empty one given as argument
self._parameters[key]['value'][0] = lowerBound.value
# Add handler for when the "value" trait changes
lowerBound.observe(self._range_handler, names='value')
upperBound = widgets.BoundedFloatText(
value=self[key][1], description=key,
min=self[key][0] + self._floatStep, max=self._max(key),
step=self._floatStep, style=self._style,
layout=Layout(width='50%'), continuous_update=False,
disabled=disabled)
# Save the widget's value in case its constructor automatically
# replaces an empty one given as argument
self._parameters[key]['value'][1] = upperBound.value
# Add handler for when the "value" trait changes
upperBound.observe(self._range_handler, names='value')
return widgets.HBox([lowerBound, upperBound],
layout=Layout(width=self._inputWidth))
def _create_multiselection_widget(self, key, disabled):
# type: (str, bool) -> widgets.Box
"""Return a Box containing as many Checkbox widgets as
parameter's options, with those in its "value" field checked.
Keyword Arguments:
key -- name of the parameter
disabled -- is the parameter/widget disabled?
"""
itemWidgets = []
for item in self._parameters[key]['options']:
layoutWidth = '23%' if (len(item) <= 10) else '48%'
inputWidget = _TaggedCheckbox(
value=item in self[key], description=item, tag=key,
style=self._style, layout=Layout(width=layoutWidth),
disabled=disabled)
# Add handler for when the "value" trait changes
inputWidget.observe(self._multiselection_handler, names='value')
itemWidgets.append(inputWidget)
layout = Layout(width='46%', display='flex', flex_flow='row wrap',
justify_content='space-between')
return widgets.Box(itemWidgets, layout=layout)
def _create_pairs_widget(self, key, disabled):
# type: (str, bool) -> widgets.HBox
"""Return an HBox containing the interface to add and remove
pairs of available elements.
The term "available elements" refers to those elements in the
first column of the CSV file's path stored under the parameter's
"file" key. Users will not be able to add existing pairs or
pairs formed by the same element twice.
Keyword Arguments:
key -- name of the parameter
disabled -- is the parameter/widget disabled?
"""
# Load the list of available elements from the first column of
# the CSV file saved under the parameter's "file" key
srcFilePath = self[self._parameters[key]['file']]
options = pandas.read_csv(srcFilePath).iloc[:, 0].tolist()
# Create two Select widgets with the list of available elements
leftSelect = widgets.Select(
options=options, rows=4, style=self._style,
layout=Layout(width='20%'), disabled=disabled)
rightSelect = widgets.Select(
options=options, rows=4, style=self._style,
layout=Layout(width='20%'), disabled=disabled)
# Create the add and remove buttons with the handler to add and
# remove pairs, respectively
addButton = _TaggedButton(
description='Pair >>', tooltip='Add new pair', tag=key,
layout=Layout(width='95%'), disabled=disabled)
        # Add handler for when the button is clicked
addButton.on_click(self._pairs_add_handler)
delButton = _TaggedButton(
description='<< Remove', tooltip='Remove selected pair',
tag=key, layout=Layout(width='95%'), disabled=disabled)
# Add handler for when the button is clicked
delButton.on_click(self._pairs_del_handler)
layout = Layout(width='21%', justify_content='space-around')
# Hold the buttons in a VBox to get the desired layout
buttonsBox = widgets.VBox([addButton, delButton], layout=layout)
# Create a Select widget with the parameter's list of pairs
pairs = [' , '.join(x) for x in self[key]]
pairsSelect = widgets.Select(
options=pairs, rows=4, style=self._style,
layout=Layout(width='28%'), disabled=disabled)
layout = Layout(width='46%', justify_content='space-around')
return widgets.HBox([leftSelect, rightSelect, buttonsBox, pairsSelect],
layout=layout)
def _create_save_widget(self):
# type: () -> list
"""Return a list containing the interface to save the current
parameters values as a JSON file in an introduced path.
"""
text = ("<p style=\"font-size:110%; line-height:19px; color:Black;\">"
"Where do you want to save the new set of parameters?</p>")
label = widgets.HTML(value=text, style=self._style,
layout=Layout(width='38%'))
# Create the path input widget (Text) with a default path and
# file name
defaultPath = normalise_path("parameters.json")
inputWidget = widgets.Text(
value=defaultPath, placeholder=defaultPath, style=self._style,
layout=Layout(width='40%'), continuous_update=False)
# Add handler for when the "value" trait changes
inputWidget.observe(self._save_path_handler, names='value')
# Create an HTML widget with a warning icon that will be
# displayed if the directory path does not exist
code = ("<link rel=\"stylesheet\" href=\"https://fonts.googleapis.com/i"
"con?family=Material+Icons\"><i class=\"material-icons\" style="
"\"font-size:18px; color:Red; display:inline-flex; vertical-ali"
"gn:middle;\" title=\"Path not found!\">warning</i>")
dirPath = os.path.split(inputWidget.value)[0]
visibility = 'visible' if not os.path.isdir(dirPath) else 'hidden'
layout = Layout(width='2%', visibility=visibility)
warnWidget = widgets.HTML(value=code, style=self._style, layout=layout)
# Create a save button that will be active only if every active
# parameter is valid and the destination path exists
saveButton = widgets.Button(
description='Save', button_style='danger',
tooltip='Save parameters in a JSON file',
layout=Layout(width='12%', height='35px'),
disabled=not self._valid_parameters())
# Add handler for when the button is clicked
saveButton.on_click(self._save_button_handler)
return [label, inputWidget, warnWidget, saveButton]
def _update(self):
# type: () -> None
"""Return an HBox containing the interface to add and remove
pairs of available elements.
The term "available elements" refers to those elements in the
first column of the CSV file's path stored under the parameter's
"file" key. Users will not be able to add existing pairs or
pairs formed by the same element twice. If the CSV file path
changes, the pairs list will be emptied and the set of available
elements will be updated.
"""
# Update the status and/or visibility of each parameter's widget
for key in self._parameters.keys():
interface = self._widgets[key]
disabled = not self._is_active(key)
if (disabled):
interface[0].value = interface[0].value.replace('Black', 'Gray')
else:
interface[0].value = interface[0].value.replace('Gray', 'Black')
interface[1].layout.visibility = 'hidden' if disabled else 'visible'
typeStr = self._parameters[key]['type']
if (typeStr == 'bool'):
interface[2].children[0].disabled = disabled
elif (typeStr in ['int', 'float']):
# Update minimum and maximum bounds too
interface[2].min = self._min(key)
interface[2].max = self._max(key)
interface[2].disabled = disabled
elif (typeStr == 'path'):
interface[2].children[0].disabled = disabled
# Display the warning widget if the parameter is enabled
# and the file does not exist
if (not disabled and not os.path.isfile(self[key])):
interface[2].children[1].layout.visibility = 'visible'
else:
interface[2].children[1].layout.visibility = 'hidden'
elif (typeStr in ['int range', 'float range']):
# Update minimum and maximum bounds of the range too
interface[2].children[0].min = self._min(key)
interface[2].children[0].disabled = disabled
interface[2].children[1].max = self._max(key)
interface[2].children[1].disabled = disabled
elif (typeStr == 'multiselection'):
for child in interface[2].children:
child.disabled = disabled
elif (typeStr == 'pairs'):
interface[2].children[0].disabled = disabled
interface[2].children[1].disabled = disabled
for grandchild in interface[2].children[2].children:
grandchild.disabled = disabled
interface[2].children[3].disabled = disabled
else:
interface[2].disabled = disabled
# Ensure the save button should be available and ready to save
# the new set of parameters
self._widgets['save'][3].description = 'Save'
self._widgets['save'][3].icon = ''
self._widgets['save'][3].disabled = not self._valid_parameters()
def _default_handler(self, change):
# type: (dict) -> None
"""Handle the "value" trait change assigning the new value to
the corresponding parameter.
The update() method is launched at the end to ensure every
widget is updated according to the change in this parameter.
Keyword Arguments:
change -- dict holding the information about the change
"""
key = change['owner'].description
self._parameters[key]['value'] = change['new']
self._update()
def _bool_handler(self, change):
# type: (dict) -> None
"""Handle the "value" trait change assigning the new value to
the corresponding "bool" type parameter.
The update() method is launched at the end to ensure every
widget is updated according to the change in this parameter.
Keyword Arguments:
change -- dict holding the information about the change
"""
key = change['owner'].tag
self._parameters[key]['value'] = change['new']
# Change ToggleButton's description to "Yes" or "No" depending
# on whether its new value is True or False, respectively
change['owner'].description = 'Yes' if change['new'] else 'No'
self._update()
def _path_handler(self, change):
# type: (dict) -> None
"""Handle the "value" trait change assigning the new value to
the corresponding "path" type parameter.
The update() method is launched at the end to ensure every
widget is updated according to the change in this parameter.
Keyword Arguments:
change -- dict holding the information about the change
"""
key = change['owner'].description
self._parameters[key]['value'] = normalise_path(change['new'])
# Replace the introduced path by its normalised version to
# provide the user with more information in case there is
# something wrong with the path
change['owner'].value = self[key]
# Get the "pairs" type parameter that has this parameter in its
# "field" key to update the contents of its widgets
for param, data in self._parameters.items():
if ((data['type'] == 'pairs') and (data['file'] == key)):
pairsWidget = self._widgets[param][2]
if (os.path.isfile(self[key])):
# Update the information of available elements
options = pandas.read_csv(self[key]).iloc[:, 0].tolist()
pairsWidget.children[0].options = options
pairsWidget.children[1].options = options
else:
# Since the file does not exist, there are no
# available elements
pairsWidget.children[0].options = []
pairsWidget.children[1].options = []
# Since the file has changed, empty the list of pairs
self._parameters[param]['value'] = []
pairsWidget.children[3].options = []
break
self._update()
def _range_handler(self, change):
# type: (dict) -> None
"""Handle the "value" trait change assigning the new value to
the corresponding "int/float range" type parameter.
The update() method is launched at the end to ensure every
widget is updated according to the change in this parameter.
Keyword Arguments:
change -- dict holding the information about the change
"""
key = change['owner'].description
# Both children have the same step
step = self._widgets[key][2].children[0].step
if (change['owner'].min == self._min(key)):
# Trait changed in the widget corresponding to the lower
# bound of the range
self._parameters[key]['value'][0] = change['new']
self._widgets[key][2].children[1].min = change['new'] + step
else:
# Trait changed in the widget corresponding to the upper
# bound of the range
self._parameters[key]['value'][1] = change['new']
self._widgets[key][2].children[0].max = change['new'] - step
self._update()
def _multiselection_handler(self, change):
# type: (dict) -> None
"""Handle the "value" trait change updating the list of values
of the corresponding "multiselection" type parameter.
The update() method is launched at the end to ensure every
widget is updated according to the change in this parameter.
Keyword Arguments:
change -- dict holding the information about the change
"""
key = change['owner'].tag
if (change['new']):
self._parameters[key]['value'].append(change['owner'].description)
else:
self._parameters[key]['value'].remove(change['owner'].description)
self._update()
def _pairs_add_handler(self, button):
# type: (_TaggedButton) -> None
"""Handle when the button is clicked to add a pair to the
corresponding "pairs" type parameter.
The update() method is launched at the end to ensure every
widget is updated according to the change in this parameter.
Keyword Arguments:
button -- clicked button widget instance
"""
key = button.tag
# Add selected elements in both Selection widgets as a new pair
leftSel = self._widgets[key][2].children[0].value
rightSel = self._widgets[key][2].children[1].value
newPair = [leftSel, rightSel]
# The pairs are considered sets, that is, the order of the
# elements is ignored
if ((leftSel != rightSel) and (newPair not in self[key])
and (newPair[::-1] not in self[key])):
self._parameters[key]['value'].append(newPair)
# Since the "options" field is a tuple, build a new list
# with the new pair
self._widgets[key][2].children[3].options = \
[' , '.join(x) for x in self[key]]
self._update()
def _pairs_del_handler(self, button):
# type: (_TaggedButton) -> None
"""Handle when the button is clicked to remove a pair of the
corresponding "pairs" type parameter.
The update() method is launched at the end to ensure every
widget is updated according to the change in this parameter.
Keyword Arguments:
button -- clicked button widget instance
"""
key = button.tag
pairsWidget = self._widgets[key][2].children[3]
# Get the selected pair from the pairs widget
pairSel = pairsWidget.value
if (pairSel):
pair = pairSel.split(' , ')
self._parameters[key]['value'].remove(pair)
# Since the "options" field is a tuple, build a new list
# without the deleted pair
pairsWidget.options = [' , '.join(x) for x in self[key]]
# Select the first pair to ensure coherence with the change
if (pairsWidget.options):
pairsWidget.value = pairsWidget.options[0]
self._update()
def _save_path_handler(self, change):
# type: (dict) -> None
"""Handle the "value" trait change checking if the path where to
save the parameters values exists.
A warning sign will be displayed if the given directory path
does not exist. The update() method is launched at the end to
ensure every widget is updated according to the change in this
parameter.
Keyword Arguments:
change -- dict holding the information about the change
"""
newPath = normalise_path(change['new'])
dirPath = os.path.split(newPath)[0]
if (not os.path.isdir(dirPath)):
self._widgets['save'][2].layout.visibility = 'visible'
else:
self._widgets['save'][2].layout.visibility = 'hidden'
# Replace the introduced path by its normalised version to
# provide the user with more information in case there is
# something wrong
change['owner'].value = newPath
self._update()
def _save_button_handler(self, button):
# type: (widgets.Button) -> None
"""Handle when the button is clicked to save the parameters
values in a JSON file.
Keyword Arguments:
button -- clicked button widget instance
"""
self.write(self._widgets['save'][1].value)
# Change the button's text to tell the user the JSON parameters
# file has been correctly created
button.description = 'Saved'
button.icon = 'check'
def _min(self, key):
# type: (str) -> object
"""Return the largest value in the parameter's "min" list.
        Applies round() to the output of LFParameters._min() to
get a more comparable result regarding floating point arithmetic
issues.
Keyword Arguments:
key -- name of the parameter
"""
return round(LFParameters._min(self, key), self._floatPointPrecision)
def _max(self, key):
# type: (str) -> object
"""Return the smallest value in the parameter's "max" list.
        Applies round() to the output of LFParameters._max() to
get a more comparable result regarding floating point arithmetic
issues.
Keyword Arguments:
key -- name of the parameter
"""
return round(LFParameters._max(self, key), self._floatPointPrecision)
def _valid_parameters(self):
# type: () -> bool
"""Return True if every active parameter has a valid value,
False otherwise.
The list of valid parameters also includes "save" destination
path, where the JSON parameters file will be saved.
"""
enabledKeys = (x for x in self._parameters.keys() if self._is_active(x))
for key in enabledKeys:
data = self._parameters[key]
# Only "multiselection" type parameters can be empty ([])
if ((data['type'] != 'multiselection')
and (data['value'] in [None, '', []])):
return False
# "path" type parameters must be checked manually, whilst
# the rest are already controlled by their widget
if ((data['type'] == 'path') and not os.path.isfile(data['value'])):
return False
# This method is also called when the save interface is being
# created, so the "save" key will not exist yet
if ('save' in self._widgets):
# Check if the directory path where to save the JSON
# parameters file exists
dirPath = os.path.split(self._widgets['save'][1].value)[0]
if (not os.path.isdir(dirPath)):
return False
return True
# ==== savioabuga/phoenix-template :: phoenix/apps/records/views.py (license: bsd-3-clause) ====
from django.contrib import messages
from django.shortcuts import HttpResponseRedirect
from django.core.urlresolvers import reverse
from smartmin.views import SmartCRUDL, SmartCreateView, SmartReadView, SmartListView
from phoenix.apps.animals.models import Animal
from phoenix.apps.utils.upload.views import UploadView, UploadListView, UploadDeleteView
from .models import AnimalNote, AnimalDocument
class AnimalDocumentUploadView(UploadView):
model = AnimalDocument
delete_url = 'records.animaldocument_delete'
def get_context_data(self, **kwargs):
context = super(AnimalDocumentUploadView, self).get_context_data(**kwargs)
#context['animal'] = self.request.animal
return context
class AnimalDocumentListView(UploadListView):
model = AnimalDocument
delete_url = 'records.animaldocument_delete'
def get_queryset(self):
return AnimalDocument.objects.all()# filter(animal=self.kwargs['animal_id']).filter(deleted=False)
class AnimalDocumentDeleteView(UploadDeleteView):
model = AnimalDocument
class AnimalNoteCRUDL(SmartCRUDL):
model = AnimalNote
class FormMixin(object):
def __init__(self, **kwargs):
# Prevent cyclic import errors
from .forms import AnimalNoteForm
self.form_class = AnimalNoteForm
super(AnimalNoteCRUDL.FormMixin, self).__init__(**kwargs)
class Create(FormMixin, SmartCreateView):
def get(self, request, *args, **kwargs):
animal_id = request.GET.get('animal', None)
if not animal_id:
messages.warning(request, 'Animal Id is required')
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
return super(AnimalNoteCRUDL.Create, self).get(request, *args, **kwargs)
def pre_save(self, obj):
animal_id = self.request.GET.get('animal', None)
try:
animal = Animal.objects.get(id=animal_id)
            except Animal.DoesNotExist:
messages.error(self.request, 'Animal Id is required')
else:
obj.animal = animal
return obj
def get_success_url(self):
return reverse('animals.animal_read', args=[self.request.GET.get('animal', None)])
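    # Example (illustrative): the Create view above must be called with the
    # animal id in the query string (e.g. ...?animal=42); without it the user
    # is warned and redirected back, and on success the browser is sent to the
    # 'animals.animal_read' page for that animal.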
class Read(SmartReadView):
fields = ('id', 'date', 'file', 'details', 'created', 'modified')
def get_file(self, obj):
return '<a href=' + obj.file.url + '>' + obj.file.name + '</a>'
class List(SmartListView):
fields = ('id', 'date', 'file', 'details')
def get_file(self, obj):
if obj.file:
return '<a href=' + obj.file.url + '>' + obj.file.name + '</a>'
return ''
def get_queryset(self, **kwargs):
queryset = super(AnimalNoteCRUDL.List, self).get_queryset(**kwargs)
queryset = queryset.filter(animal=self.request.animal)
return queryset
# class AnimalGroupNoteCRUDL(SmartCRUDL):
# model = AnimalGroupNote
#
# class FormMixin(object):
#
# def __init__(self, **kwargs):
# # Prevent cyclic import errors
# from .forms import AnimalGroupNoteForm
# self.form_class = AnimalGroupNoteForm
# super(AnimalGroupNoteCRUDL.FormMixin, self).__init__(**kwargs)
#
# class Create(FormMixin, SmartCreateView):
#
# def get(self, request, *args, **kwargs):
# animalgroup_id = request.GET.get('group', None)
# if not animalgroup_id:
# messages.warning(request, 'Animal Group Id is required')
# return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
# return super(AnimalGroupNoteCRUDL.Create, self).get(request, *args, **kwargs)
#
# def pre_save(self, obj):
# animalgroup_id = self.request.GET.get('group', None)
# try:
# animalgroup = AnimalGroup.objects.get(id=animalgroup_id)
# except AnimalGroup.DoesNotExist:
# messages.error(self.request, 'Animal Id is required')
# else:
# obj.animalgroup = animalgroup
# return obj
#
# def get_success_url(self):
# return reverse('groups.animalgroup_read', args=[self.request.GET.get('group', None)])
#
# class Read(SmartReadView):
# fields = ('id', 'date', 'file', 'details', 'created', 'modified')
#
# def get_file(self, obj):
# if obj.file:
# return '<a href=' + obj.file.url + '>' + obj.file.name + '</a>'
# return ''
#
# class List(SmartListView):
# fields = ('id', 'date', 'file', 'details')
#
# def get_file(self, obj):
# if obj.file:
# return '<a href=' + obj.file.url + '>' + obj.file.name + '</a>'
# return ''
#
# def get_queryset(self, **kwargs):
# queryset = super(AnimalGroupNoteCRUDL.List, self).get_queryset(**kwargs)
# queryset = queryset.filter(animalgroup=self.request.animalgroup)
# return queryset | bsd-3-clause | -1,207,090,966,957,749,500 | 36.510949 | 106 | 0.602374 | false | 3.704398 | false | false | false |
0lidaxiang/WeArt | chapter/view/getChapter.py | 1 | 2117 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import sys
import json
from django.http import JsonResponse
from django.shortcuts import render
from book.models import book
from chapter.models import chapter
def bookChapter(request):
context = {}
# get the book id of user input if it is not null
if 'idBook' not in request.GET:
context['status'] = "fail"
context['message'] = "The idBook variable is not in request.GET."
return JsonResponse(context)
inputIdBook = request.GET['idBook']
# get the book name of user input if it is not null
# if 'bookName' not in request.GET:
# context['status'] = "fail"
# context['message'] = "The bookName variable is not in request.GET."
# return JsonResponse(context)
# bookName = request.GET['bookName']
bookName = ""
res, status, mes = book.getValue(inputIdBook, "name")
if res:
bookName = mes
else:
print "getchapter bookChapter error" + str(status)
return render(request, 'chapter/bookChapter.html', context={'idBook': inputIdBook,'bookName': bookName})
def getChapter(request):
context = {}
reload(sys)
sys.setdefaultencoding('utf8')
# get the new book name of user input if it is not null
if 'idBook' not in request.GET:
context['status'] = "fail"
context['message'] = "The idBook variable is not in request.GET."
return JsonResponse(context)
inputIdBook = request.GET['idBook']
res, statusNumber, mes = chapter.getAll(inputIdBook)
if not res:
context['status'] = "fail"
        context['message'] = "Error: " + mes
return JsonResponse(context)
context['status'] = "success"
response_data = []
for m in mes:
response_record = {}
response_record['id'] = m.id
response_record['name'] = m.name
response_record['chapterOrder'] = m.chapterOrder
response_record['book_name'] = book.getValue(m.idBook_id, "name")[2]
response_data.append(response_record)
context["message"] = response_data
return JsonResponse(context)
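# Example (illustrative values): a successful getChapter call returns JSON of
# the form
#     {"status": "success",
#      "message": [{"id": 1, "name": "...", "chapterOrder": 1,
#                   "book_name": "..."}, ...]}
# while a failure returns {"status": "fail", "message": "..."}.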
# ==== quasiyoke/RandTalkBot :: randtalkbot/stats.py (license: agpl-3.0) ====
# RandTalkBot Bot matching you with a random person on Telegram.
# Copyright (C) 2016 quasiyoke
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime
import json
import logging
from peewee import DateTimeField, Model, Proxy, TextField
LOGGER = logging.getLogger('randtalkbot.stats')
def _(string):
return string
DATABASE_PROXY = Proxy()
RATIO_MAX = 10
class Stats(Model):
data_json = TextField()
created = DateTimeField(default=datetime.datetime.utcnow, index=True)
class Meta:
database = DATABASE_PROXY
def __init__(self, *args, **kwargs):
super(Stats, self).__init__(*args, **kwargs)
self._data_cache = None
def get_data(self):
if self._data_cache is None:
self._data_cache = json.loads(self.data_json)
return self._data_cache
def set_data(self, data):
self._data_cache = data
self.data_json = json.dumps(data)
def get_sex_ratio(self):
"""https://en.wikipedia.org/wiki/Human_sex_ratio
Returns:
float: Ratio of males over the females.
"""
try:
sex_data = self.get_data()['sex_distribution']
except (KeyError, TypeError):
return 1
males_count = sex_data.get('male', 0)
females_count = sex_data.get('female', 0)
if males_count > 0 and females_count > 0:
return males_count / females_count
elif males_count > 0:
return RATIO_MAX
elif females_count > 0:
return 1 / RATIO_MAX
return 1
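# Example (illustrative values): with data {'sex_distribution': {'male': 30,
# 'female': 20}} get_sex_ratio() returns 30 / 20 == 1.5; with no registered
# females the ratio is clamped to RATIO_MAX, and with no males to 1/RATIO_MAX.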
# ==== tedlaz/pyted :: misthodosia/m13a/f_newCoWizard.py (license: gpl-3.0) ====
# -*- coding: utf-8 -*-
'''
Created on 15 Feb 2013
@author: tedlaz
'''
sqlco = u"INSERT INTO m12_co VALUES (1,'{0}','{1}','{2}',{3},'{4}','{5}','{6}','{7}','{8}','{9}','{10}','{11}','{12}','{13}')"
from PyQt4 import QtCore, QtGui,Qt
import utils_db,widgets
import osyk
from utils_qt import fFindFromList
import datetime
class NewDbWizard(QtGui.QWizard):
def __init__(self, parent=None):
super(NewDbWizard, self).__init__(parent)
        #self.setAttribute(Qt.Qt.WA_DeleteOnClose)  # not used because it creates a problem ...
#self.addPage(IntroPage())
self.addPage(coDataPage())
self.addPage(coDataPage2())
self.addPage(filePage())
self.addPage(finalPage())
self.setWizardStyle(QtGui.QWizard.ModernStyle)
self.setOption(QtGui.QWizard.IndependentPages,True)
#self.setPixmap(QtGui.QWizard.BannerPixmap,QtGui.QPixmap(':/banner'))
#self.setPixmap(QtGui.QWizard.BackgroundPixmap, QtGui.QPixmap(':/background'))
self.setWindowTitle(u"Οδηγός Δημιουργίας Νέου Αρχείου Μισθοδοσίας")
def accept(self):
#print '%s %s %s' % (self.field('epon'),self.field('cotyp_id'),self.field('fname'))
fileSql = open(osyk.newDbFile)
script = u''
for lines in fileSql:
script += u'%s' % lines.decode('utf-8')
utils_db.executeScript(script, self.field('fname'))
sqlCo = sqlco.format(self.field('epon'),self.field('onom'),self.field('patr'),self.field('cotyp_id'),
self.field('ame'),self.field('afm'),self.field('doy'),self.field('dra'),
self.field('pol'),self.field('odo'),self.field('num'),self.field('tk'),
self.field('ikac'),self.field('ikap'))
print sqlCo
utils_db.commitToDb(sqlCo, self.field('fname'))
sqlCoy = u"INSERT INTO m12_coy VALUES (1,1,'Κεντρικό','%s')" % self.field('kad')
utils_db.commitToDb(sqlCoy, self.field('fname'))
etos = datetime.datetime.now().year
utils_db.commitToDb(u"INSERT INTO m12_xrisi (xrisi,xrisip) VALUES ('{0}','Χρήση {0}')".format(etos), self.field('fname'))
eidList = osyk.eid_cad_listFilteredDouble(self.field('kad'))
#print eidList
sqleid_ = u"INSERT INTO m12_eid (eidp,keid) VALUES ('{0}','{1}');\n"
sqleid = u''
for el in eidList:
sqleid += sqleid_.format(el[1],el[0])
utils_db.executeScript(sqleid,self.field('fname'))
super(NewDbWizard, self).accept()
class IntroPage(QtGui.QWizardPage):
def __init__(self, parent=None):
super(IntroPage, self).__init__(parent)
self.setTitle(u"Οδηγίες")
#self.setPixmap(QtGui.QWizard.WatermarkPixmap, QtGui.QPixmap(':/watermark1'))
label = QtGui.QLabel(u"Αυτός ο οδηγός θα δημιουργήσει νέο Αρχείο Μισθοδοσίας.\n\n "
u"Εσείς θα πρέπει απλά να εισάγετε τις απαραίτητες παραμέτρους "
u"καθώς και το όνομα του αρχείου και το σημείο αποθήκευσης.\n\n"
u"Μπορείτε σε κάθε βήμα να αναθεωρήσετε και να επιστρέψετε.\n\n"
u"Πατήστε δημιουργία στην τελευταία οθόνη για να ολοκληρώσετε.")
label.setWordWrap(True)
layout = QtGui.QVBoxLayout()
layout.addWidget(label)
self.setLayout(layout)
class coDataPage(QtGui.QWizardPage):
def __init__(self, parent=None):
super(coDataPage, self).__init__(parent)
#parent.button(QtGui.QWizard.BackButton).setVisible(False)
#self.buttonText(QtGui.QWizard.NextButton)
self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >')
self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
self.setTitle(u"Πληροφορίες εταιρίας")
self.setSubTitle(u"Συμπληρώστε τα βασικά στοιχεία της εταιρίας")
#self.setPixmap(QtGui.QWizard.LogoPixmap, QtGui.QPixmap(':/logo1'))
cotypLabel = QtGui.QLabel(u"Τύπος επιχείρησης:")
cotyp = widgets.DbComboBox([[1,u'Νομικό Πρόσωπο'],[2,u'Φυσικό Πρόσωπο']])
cotypLabel.setBuddy(cotyp)
eponNameLabel = QtGui.QLabel(u"Επωνυμία:")
eponNameLineEdit = QtGui.QLineEdit()
eponNameLabel.setBuddy(eponNameLineEdit)
onomLabel = QtGui.QLabel(u"Όνομα (Για φυσικά πρόσωπα):")
onomLineEdit = QtGui.QLineEdit()
onomLineEdit.setDisabled(True)
onomLabel.setBuddy(onomLineEdit)
patrLabel = QtGui.QLabel(u"Πατρώνυμο (Για φυσικά πρόσωπα):")
patrLineEdit = QtGui.QLineEdit()
patrLineEdit.setDisabled(True)
patrLabel.setBuddy(patrLineEdit)
cotypValue = QtGui.QLineEdit()
cotypValue.setText('1')
def onCotypActivated():
if cotyp.currentIndex() ==1:
onomLineEdit.setDisabled(False)
patrLineEdit.setDisabled(False)
cotypValue.setText('2')
else:
onomLineEdit.setText('')
patrLineEdit.setText('')
onomLineEdit.setDisabled(True)
patrLineEdit.setDisabled(True)
cotypValue.setText('1')
cotyp.activated.connect(onCotypActivated)
kadLabel = QtGui.QLabel(u"Κωδικός αρ.Δραστηριότητας:")
kadLineEdit = QtGui.QLineEdit()
kadLabel.setBuddy(kadLineEdit)
kadLineEdit.setReadOnly(True)
kadFindButton = QtGui.QPushButton(u'Εύρεση ΚΑΔ')
kadLayout = QtGui.QHBoxLayout()
kadLayout.addWidget(kadLineEdit)
kadLayout.addWidget(kadFindButton)
kadpLabel = QtGui.QLabel(u"Περιγραφή αρ.Δραστηριότητας:")
kadpTextEdit = QtGui.QTextEdit()
kadpLabel.setBuddy(kadpTextEdit)
kadpTextEdit.setReadOnly(True)
draLabel = QtGui.QLabel(u"Συντομογραφία Δραστηριότητας:")
draLineEdit = QtGui.QLineEdit()
draLabel.setBuddy(draLineEdit)
def openFindDlg():
kadList = osyk.cad_list()
head = [u'ΚΑΔ',u'Περιγραφή']
cw = [35,300]
form = fFindFromList(kadList,head,cw)
if form.exec_() == QtGui.QDialog.Accepted:
kadLineEdit.setText(form.array[0])
kadpTextEdit.setText(form.array[1])
kadFindButton.clicked.connect(openFindDlg)
self.registerField('cotyp_id',cotypValue)
self.registerField('epon*', eponNameLineEdit)
self.registerField('onom', onomLineEdit)
self.registerField('patr', patrLineEdit)
self.registerField('kad*', kadLineEdit)
self.registerField('dra*', draLineEdit)
#self.registerField('kadt*', kadpTextEdit)
layout = QtGui.QGridLayout()
layout.addWidget(cotypLabel, 0, 0)
layout.addWidget(cotyp, 0, 1)
layout.addWidget(eponNameLabel, 1, 0)
layout.addWidget(eponNameLineEdit, 1, 1)
layout.addWidget(onomLabel, 2, 0)
layout.addWidget(onomLineEdit, 2, 1)
layout.addWidget(patrLabel, 3, 0)
layout.addWidget(patrLineEdit, 3, 1)
layout.addWidget(kadLabel, 4, 0)
layout.addLayout(kadLayout, 4, 1)
layout.addWidget(kadpLabel,5, 0)
layout.addWidget(kadpTextEdit, 5, 1,2,1)
layout.addWidget(draLabel,7, 0)
layout.addWidget(draLineEdit,7, 1)
self.setLayout(layout)
class coDataPage2(QtGui.QWizardPage):
def __init__(self, parent=None):
super(coDataPage2, self).__init__(parent)
self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >')
self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
self.setTitle(u"Πληροφορίες εταιρίας")
self.setSubTitle(u"Συμπληρώστε τα υπόλοιπα στοιχεία της εταιρίας")
afmLabel = QtGui.QLabel(u"ΑΦΜ:")
afmLineEdit = QtGui.QLineEdit()
afmLabel.setBuddy(afmLineEdit)
doyLabel = QtGui.QLabel(u"ΔΟΥ:")
doyLineEdit = QtGui.QLineEdit()
doyLabel.setBuddy(doyLineEdit)
doyLineEdit.setReadOnly(True)
doyFindButton = QtGui.QPushButton(u'...')
doyFindButton.setMaximumSize(QtCore.QSize(20, 50))
doyLayout = QtGui.QHBoxLayout()
doyLayout.addWidget(doyLineEdit)
doyLayout.addWidget(doyFindButton)
def openFindDlg():
head = [u'Κωδ',u'ΔΟΥ']
cw = [35,300]
form = fFindFromList(osyk.doy_list(),head,cw)
if form.exec_() == QtGui.QDialog.Accepted:
doyLineEdit.setText(form.array[1])
doyFindButton.clicked.connect(openFindDlg)
poliLabel = QtGui.QLabel(u"Πόλη:")
poliLineEdit = QtGui.QLineEdit()
poliLabel.setBuddy(poliLineEdit)
tkLabel = QtGui.QLabel(u"Ταχ.Κωδικός:")
tkLineEdit = QtGui.QLineEdit()
tkLabel.setBuddy(tkLineEdit)
odosLabel = QtGui.QLabel(u"Οδός:")
odosLineEdit = QtGui.QLineEdit()
odosLabel.setBuddy(odosLineEdit)
numLabel = QtGui.QLabel(u"Αριθμός:")
numLineEdit = QtGui.QLineEdit()
numLabel.setBuddy(numLineEdit)
ameLabel = QtGui.QLabel(u"Αρ.Μητρ.ΙΚΑ:")
ameLineEdit = QtGui.QLineEdit()
ameLabel.setBuddy(ameLineEdit)
ikacLabel = QtGui.QLabel(u"Κωδ.ΙΚΑ:")
ikacLineEdit = QtGui.QLineEdit()
ikacLabel.setBuddy(ikacLineEdit)
ikacLineEdit.setReadOnly(True)
ikaLabel = QtGui.QLabel(u"Υπ/μα.ΙΚΑ:")
ikaLineEdit = QtGui.QLineEdit()
ikaLabel.setBuddy(ikaLineEdit)
ikaLineEdit.setReadOnly(True)
ikaFindButton = QtGui.QPushButton(u'...')
ikaFindButton.setMaximumSize(QtCore.QSize(20, 50))
ikaLayout = QtGui.QHBoxLayout()
ikaLayout.addWidget(ikaLineEdit)
ikaLayout.addWidget(ikaFindButton)
def openFindDlgIKA():
head = [u'Κωδ',u'Υποκατάστημα ΙΚΑ']
cw = [35,300]
form = fFindFromList(osyk.ika_list(),head,cw)
if form.exec_() == QtGui.QDialog.Accepted:
ikacLineEdit.setText(form.array[0])
ikaLineEdit.setText(form.array[1])
ikaFindButton.clicked.connect(openFindDlgIKA)
self.registerField('afm*',afmLineEdit)
self.registerField('doy*',doyLineEdit)
self.registerField('pol*',poliLineEdit)
self.registerField('odo',odosLineEdit)
self.registerField('num',numLineEdit)
self.registerField('tk',tkLineEdit)
self.registerField('ikac*',ikacLineEdit)
self.registerField('ikap*',ikaLineEdit)
self.registerField('ame*',ameLineEdit)
layout = QtGui.QGridLayout()
layout.addWidget(afmLabel, 0, 0)
layout.addWidget(afmLineEdit, 0, 1)
layout.addWidget(doyLabel, 0, 2)
layout.addLayout(doyLayout, 0, 3)
layout.addWidget(poliLabel, 1, 0)
layout.addWidget(poliLineEdit, 1, 1)
layout.addWidget(tkLabel, 1, 2)
layout.addWidget(tkLineEdit, 1, 3)
layout.addWidget(odosLabel, 2, 0)
layout.addWidget(odosLineEdit, 2, 1)
layout.addWidget(numLabel, 2, 2)
layout.addWidget(numLineEdit, 2, 3)
layout.addWidget(ameLabel, 3, 0)
layout.addWidget(ameLineEdit, 3, 1)
layout.addWidget(ikacLabel, 4, 0)
layout.addWidget(ikacLineEdit, 4, 1)
layout.addWidget(ikaLabel, 4, 2)
layout.addLayout(ikaLayout, 4, 3)
self.setLayout(layout)
class filePage(QtGui.QWizardPage):
def __init__(self, parent=None):
super(filePage, self).__init__(parent)
self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >')
self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
self.setTitle(u"Όνομα αρχείου")
self.setSubTitle(u"Δώστε όνομα και περιοχή αποθήκευσης")
#self.setPixmap(QtGui.QWizard.LogoPixmap, QtGui.QPixmap(':/logo1'))
fileNameLabel = QtGui.QLabel(u"Όνομα αρχείου:")
self.fileNameLineEdit = QtGui.QLineEdit()
self.fileNameLineEdit.setReadOnly(True)
fileNameLabel.setBuddy(self.fileNameLineEdit)
butFile = QtGui.QPushButton(u'...')
butFile.clicked.connect(self.fSave)
fileLayout = QtGui.QHBoxLayout()
fileLayout.addWidget(self.fileNameLineEdit)
fileLayout.addWidget(butFile)
patrLabel = QtGui.QLabel(u"Πατρώνυμο (Για φυσικά πρόσωπα):")
patrLineEdit = QtGui.QLineEdit()
patrLabel.setBuddy(patrLineEdit)
cotypLabel = QtGui.QLabel(u"Τύπος επιχείρησης:")
cotyp = QtGui.QComboBox()
cotypLabel.setBuddy(cotyp)
cotyp.addItems([u'1.Νομικό Πρόσωπο',u'2.Φυσικό Πρόσωπο'])
self.registerField('fname*', self.fileNameLineEdit)
layout = QtGui.QGridLayout()
layout.addWidget(fileNameLabel, 0, 0)
layout.addLayout(fileLayout, 0, 1)
self.setLayout(layout)
def fSave(self):
fileName = QtGui.QFileDialog.getSaveFileName(self,
"QFileDialog.getSaveFileName()",
self.field('fname'),
"payroll m13 (*.m13)", QtGui.QFileDialog.Options())
if fileName:
self.fileNameLineEdit.setText(fileName)
class finalPage(QtGui.QWizardPage):
def __init__(self, parent=None):
super(finalPage, self).__init__(parent)
self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
self.setButtonText(QtGui.QWizard.FinishButton,u'Ολοκλήρωση')
self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
self.setTitle(u"Δημιουργία αρχείου ")
#self.setPixmap(QtGui.QWizard.WatermarkPixmap, QtGui.QPixmap(':/watermark2'))
self.label = QtGui.QLabel()
self.label.setWordWrap(True)
layout = QtGui.QVBoxLayout()
layout.addWidget(self.label)
self.setLayout(layout)
def initializePage(self):
finishText = self.wizard().buttonText(QtGui.QWizard.FinishButton)
finishText.replace('&', '')
txt = u'Προσοχή , θα δημιουργηθεί αρχείο μισθοδοσίας με τις παρακάτω παραμέτρους :\n\n'
txt += u'Στοιχεία Επιχείρησης : %s \n\n' % self.field('epon')
txt += u'Όνομα Αρχείου : %s \n\n' % self.field('fname')
txt += u"\nΠατήστε %s για να ολοκληρωθεί η διαδικασία." % finishText
txt += u"\n\nΜε την ολοκλήρωση της διαδικασίας το νέο αρχείο είναι έτοιμο για εισαγωγή δεδομένων!!!"
self.label.setText(txt)
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
wizard = NewDbWizard()
wizard.show()
sys.exit(app.exec_())
# ==== denex/snafucator :: Python/pin/pin_generator.py (license: gpl-3.0) ====
def _create_pins():
"""
:rtype: Iterable[int]
"""
middle = 5000
for i in range(0, 2 * middle):
if i % 2 == 0:
yield middle - i // 2 - 1
else:
yield middle + i // 2
PINS = tuple(_create_pins())
assert len(PINS) == 10000, "Len = %d" % len(PINS)
assert min(PINS) == 0000
assert max(PINS) == 9999
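# Illustrative check of the ordering: values alternate around the middle,
# nearest-to-5000 first.
assert PINS[:6] == (4999, 5000, 4998, 5001, 4997, 5002)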
def get_pin_index(pin):
return 0 if pin is None else PINS.index(pin)
def pin_generator(last_pin=None):
"""
:type last_pin: int or None
:rtype: Iterable[int]
"""
start_pos = get_pin_index(last_pin) + 1 if last_pin is not None else 0
for i in range(start_pos, len(PINS)):
yield PINS[i]
def test_selector():
print(get_pin_index(6000))
l1 = list(pin_generator(last_pin=9997))
assert len(frozenset(l1)) == 4
l2 = list(pin_generator(last_pin=4999))
assert len(frozenset(l2)) == 9999
l3 = list(pin_generator(last_pin=5000))
assert len(frozenset(l3)) == 9998
if __name__ == '__main__':
test_selector()
# ==== nodesign/weioMinima :: weioLib/weioParser.py ====
###
#
# WEIO Web Of Things Platform
# Copyright (C) 2013 Nodesign.net, Uros PETREVSKI, Drasko DRASKOVIC
# All rights reserved
#
# ## ## ######## #### #######
# ## ## ## ## ## ## ##
# ## ## ## ## ## ## ##
# ## ## ## ###### ## ## ##
# ## ## ## ## ## ## ##
# ## ## ## ## ## ## ##
# ### ### ######## #### #######
#
# Web Of Things Platform
#
# This file is part of WEIO and is published under BSD license.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. All advertising materials mentioning features or use of this software
# must display the following acknowledgement:
# This product includes software developed by the WeIO project.
# 4. Neither the name of the WeIO nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY WEIO PROJECT AUTHORS AND CONTRIBUTORS ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL WEIO PROJECT AUTHORS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors :
# Uros PETREVSKI <uros@nodesign.net>
# Drasko DRASKOVIC <drasko.draskovic@gmail.com>
#
###
from weioLib.weioIO import *
from weioUserApi import serverPush
from weioLib import weioRunnerGlobals
import platform, sys
# WeIO API bindings from websocket to lower levels
# Each data argument is array of data
# Return value is dictionary
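# Example (assumed dispatch, for illustration): when the browser asks to read a
# pin, the runner routes the request to the matching binding, e.g.
#
#     result = callDigitalRead([5])   # -> {"data": <0 or 1>, "pin": 5}
#
# and the returned dictionary is what gets pushed back over the websocket.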
def callPinMode(data) :
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
pinMode(data[0],data[1])
else :
print "pinMode ON PC", data
return None
def callPortMode(data) :
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
portMode(data[0],data[1])
else :
print "pinMode ON PC", data
return None
def callDigitalWrite(data) :
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
digitalWrite(data[0], data[1])
else :
print "digitalWrite ON PC", data
return None
def callDigitalRead(data) :
bck = {}
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
value = digitalRead(data[0])
bck["data"] = value
bck["pin"] = data[0]
else :
print "digitalRead ON PC", data
bck["data"] = 1 # faked value
bck["pin"] = data[0] # pin
return bck
def callPulseIn(data) :
bck = {}
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
value = pulseIn(data[0], data[1], data[2])
bck["data"] = value
bck["pin"] = data[0]
bck["level"] = data[1]
bck["timeout"] = data[1]
else :
print "pulseIn ON PC", data
bck["data"] = 1 # faked value
bck["pin"] = data[0] # pin
bck["level"] = data[1] # level
bck["timeout"] = data[2] # timeout
return bck
def callPortWrite(data) :
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
portWrite(data[0], data[1])
else :
print "portWrite ON PC", data
return None
def callPortRead(data) :
bck = {}
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
value = portRead(data[0])
bck["data"] = value
bck["port"] = data[0]
else :
print "digitalRead ON PC", data
bck["data"] = 1 # faked value
bck["port"] = data[0] # pin
return bck
def callDHTRead(data) :
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
dhtRead(data[0])
else :
print "dhtRead ON PC", data
return None
def callAnalogRead(data) :
bck = {}
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
#print "From browser ", data
value = analogRead(data[0]) # this is pin number
bck["data"] = value
bck["pin"] = data[0]
else :
print "analogRead ON PC", data
bck["data"] = 1023 # faked value
bck["pin"] = data[0]
return bck
def callSetPwmPeriod(data) :
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
setPwmPeriod(data[0],data[1])
else:
print "setPwmPeriod ON PC", data
return None
# def callSetPwmLimit(data) :
# if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
# setPwmLimit(data[0])
# else:
# print "setPwmLimit ON PC", data
# return None
def callPwmWrite(data) :
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
pwmWrite(data[0], data[1])
else :
print "pwmWrite ON PC", data
return None
def callProportion(data) :
bck = {}
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
#print "From browser ", data
value = proportion(data[0],data[1],data[2],data[3],data[4])
bck["data"] = value
else :
print "proportion ON PC", data
bck["data"] = data
return bck
def callAttachInterrupt(data) :
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
iObj = {"pin" : data[0], "jsCallbackString" : data[2]}
attachInterrupt(data[0], data[1], genericInterrupt, iObj)
else:
print "attachInterrupt ON PC", data
return None
def callDetachInterrupt(data) :
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
detachInterrupt(data[0])
else:
print "detachInterrupt ON PC", data
return None
def genericInterrupt(event, obj):
bck = {}
bck["data"] = obj["pin"]
bck["eventType"] = getInterruptType(event["type"])
serverPush(obj["jsCallbackString"], bck)
def callDelay(data) :
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
delay(data[0])
else :
print "delay ON PC", data
return None
def callTone(data) :
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
print "TONE VALS", len(data)
if (len(data)==2):
tone(data[0], data[1])
elif (len(data)==3):
tone(data[0], data[1], data[2])
else :
print "tone ON PC", data
return None
def callNotone(data) :
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
noTone(data[0])
else :
print "notone ON PC", data
return None
def callConstrain(data) :
    bck = {}
    if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
        value = constrain(data[0], data[1], data[2])
        bck["data"] = value
    else :
        print "constrain ON PC", data
        bck["data"] = 1 # faked value
        bck["pin"] = data[0] # pin
    return bck
def callMillis(data) :
bck = {}
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
value = millis()
bck["data"] = value
else :
print "millis ON PC", data
bck["data"] = 0 # faked value
return bck
def callGetTemperature(data):
bck = {}
if (weioRunnerGlobals.WEIO_SERIAL_LINKED is True):
value = getTemperature()
bck["data"] = value
else :
print "getTemperature ON PC", data
bck["data"] = 0 # faked value
return bck
def callUserMesage(data):
print "USER TALKS", data
#weioRunnerGlobals.userMain
def pinsInfo(data) :
bck = {}
bck["data"] = weioRunnerGlobals.DECLARED_PINS
#print("GET PIN INFO ASKED!", bck["data"])
return bck
def callListSerials(data):
bck = {}
bck["data"] = listSerials()
return bck
# UART SECTION
clientSerial = None
def callInitSerial(data):
global clientSerial
if (clientSerial is None) :
clientSerial = initSerial(data[0], data[1])
def callSerialWrite(data):
global clientSerial
if not(clientSerial is None) :
clientSerial.write(data)
else :
sys.stderr.write("Serial port is not initialized. Use initSerial function first")
def callSerialRead(data):
global clientSerial
bck = {}
if not(clientSerial is None) :
bck["data"] = clientSerial.read()
else :
sys.stderr.write("Serial port is not initialized. Use initSerial function first")
return bck
# SPI SECTION
SPI = None
def callInitSPI(data):
global SPI
if (SPI is None) :
SPI = initSPI(data[0])
def callWriteSPI(data):
global SPI
if not(SPI is None) :
SPI.write(data[0])
else :
sys.stderr.write("SPI port is not initialized. Use initSerial function first")
def callReadSPI(data):
global SPI
bck = {}
if not(SPI is None) :
bck["data"] = SPI.read(data[0])
else :
sys.stderr.write("SPI port is not initialized. Use initSerial function first")
return bck
###
# WeIO native spells
###
weioSpells = {
"digitalWrite" :callDigitalWrite,
"digitalRead" :callDigitalRead,
"pulseIn" :callPulseIn,
"portWrite" :callPortWrite,
"portRead" :callPortRead,
"dhtRead" :callDHTRead,
"analogRead" :callAnalogRead,
"pinMode" :callPinMode,
"portMode" :callPortMode,
"setPwmPeriod" :callSetPwmPeriod,
"pwmWrite" :callPwmWrite,
"proportion" :callProportion,
"attachInterrupt" :callAttachInterrupt,
"detachInterrupt" :callDetachInterrupt,
"tone" :callTone,
"noTone" :callNotone,
"constrain" :callConstrain,
"millis" :callMillis,
"getTemperature" :callGetTemperature,
"delay" :callDelay,
"pinsInfo" :pinsInfo,
"listSerials" :callListSerials,
"initSerial" :callInitSerial,
"serialWrite" :callSerialWrite,
"initSPI" :callInitSPI,
"readSPI" :callReadSPI,
"writeSPI" :callWriteSPI
# "message":callUserMesage
}
###
# User added spells (handlers)
###
weioUserSpells = {}
def addUserEvent(event, handler):
global weioUserSpells
#print "Adding event ", event
#print "and handler ", handler
weioUserSpells[event] = handler
def removeUserEvents():
global weioUserSpells
weioUserSpells.clear()
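# Illustrative usage (not part of the original file; both names below are
# placeholders): a user script can expose its own handler to the web client
# by registering it under a spell name.
#
# def myHandler(data):
#     return {"data": data}
# addUserEvent("myCustomEvent", myHandler)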
| bsd-3-clause | -1,069,625,149,442,586,900 | 29.032877 | 89 | 0.614213 | false | 3.386469 | false | false | false |
wahaha02/myblog | blog/templatetags/highlight.py | 1 | 1181 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from django.template import Library
import re
DEBUG = False
register = Library()
@register.filter
def highlight_format(value):
p_sub = re.compile('__codestart__ (\w+)')
value = p_sub.sub(r'<pre name="code" class="\g<1>">', value)
p_sub = re.compile(r'__codeend__', re.VERBOSE)
value = p_sub.sub(r'</pre>', value)
if DEBUG:
print value
print '+' * 80
p_highlight = re.compile(r'(<pre name="code" class="\w+">)(?P<codeblock>.*)(</pre>)', re.S)
f_list = p_highlight.findall(value)
if f_list:
s_list = p_highlight.split(value)
if DEBUG:
for i in s_list:
print i
print '=' * 80
for code_block in p_highlight.finditer(value):
code = code_block.group('codeblock')
index = s_list.index(code)
            # convert HTML-escaped entities back into plain characters
            code = code.replace('&lt;', '<')
            code = code.replace('&gt;', '>')
            code = code.replace('&amp;', '&')
code = code.replace('<p>', '')
code = code.replace('</p>', '')
s_list[index] = code
value = ''.join(s_list)
return value
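# Illustrative usage (not part of the original file): once this tag library is
# loaded in a template, the filter turns the editor markers into a <pre> block
# the syntax highlighter can pick up, e.g.
# "__codestart__ python x = 1 __codeend__" becomes
# '<pre name="code" class="python"> x = 1 </pre>'.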
| bsd-3-clause | 7,682,443,373,309,620,000 | 28.525 | 95 | 0.515665 | false | 3.473529 | false | false | false |
OmkarPathak/Python-Programs | CompetitiveProgramming/HackerEarth/Algorithms/String/P11_CaesarsCipher.py | 1 | 2255 | # Caesar's Cipher is a very famous encryption technique used in cryptography. It is a type of substitution
# cipher in which each letter in the plaintext is replaced by a letter some fixed number of positions down
# the alphabet. For example, with a shift of 3, D would be replaced by G, E would become H, X would become A
# and so on.
#
# Encryption of a letter X by a shift K can be described mathematically as
# EK(X)=(X+K) % 26.
#
# Given a plaintext and it's corresponding ciphertext, output the minimum non-negative value of shift that was
# used to encrypt the plaintext or else output −1 if it is not possible to obtain the given ciphertext from
# the given plaintext using Caesar's Cipher technique.
#
# Input:
#
# The first line of the input contains Q, denoting the number of queries.
#
# The next Q lines contain two strings S and T consisting of only upper-case letters.
#
# Output:
#
# For each test-case, output a single non-negative integer denoting the minimum value of shift that was used
# to encrypt the plaintext or else print −1 if the answer doesn't exist.
#
# Constraints:
# 1≤Q≤5
# 1≤|S|≤10^5
# 1≤|T|≤10^5
# |S| = |T|
#
# SAMPLE INPUT
# 2
# ABC
# DEF
# AAA
# PQR
#
# SAMPLE OUTPUT
# 3
# -1
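# Worked example (illustrative, derived from the samples above): using 0-based
# alphabet positions, "ABC" -> "DEF" gives per-letter shifts (3-0) % 26 = 3,
# (4-1) % 26 = 3 and (5-2) % 26 = 3, so the answer is 3. "AAA" -> "PQR" gives
# shifts 15, 16 and 17, which disagree, so the answer is -1.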
# My Solution
for _ in range(int(input())):
string_one = input()
    string_two = input()
check_one = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
# ZYXWVUTSRQPONMLKJIHGFEDCBA
check_two = check_one[::-1]
result = []
for i in range(len(string_one)):
if(check_one.find(string_one[i]) > check_one.find(string_two[i])):
result.append(check_two.find(string_one[i]) + check_one.find(string_two[i]) + 1)
else:
result.append(check_one.find(string_two[i]) - check_one.find(string_one[i]))
if result.count(result[0]) == len(string_one):
print(result[0])
else:
print(-1)
# More Efficient Solution:
tests = int(input().strip())
for i in range(tests):
plain = input().strip()
cipher = input().strip()
shift = (ord(cipher[0])-ord(plain[0])+26)%26
valid = True
for j in range(len(plain)):
if (ord(cipher[j])-ord(plain[j])+26)%26 != shift:
valid = False
break
print(shift) if valid else print("-1")
| gpl-3.0 | -6,465,062,925,664,911,000 | 30.535211 | 110 | 0.659669 | false | 3.203147 | false | false | false |
lmprice/ansible | lib/ansible/playbook/role/__init__.py | 13 | 18914 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import collections
import os
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleAssertionError
from ansible.module_utils.six import iteritems, binary_type, text_type
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.conditional import Conditional
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role.metadata import RoleMetadata
from ansible.playbook.taggable import Taggable
from ansible.plugins.loader import get_all_plugin_loaders
from ansible.utils.vars import combine_vars
__all__ = ['Role', 'hash_params']
# TODO: this should be a utility function, but can't be a member of
# the role due to the fact that it would require the use of self
# in a static method. This is also used in the base class for
# strategies (ansible/plugins/strategy/__init__.py)
def hash_params(params):
"""
Construct a data structure of parameters that is hashable.
This requires changing any mutable data structures into immutable ones.
We chose a frozenset because role parameters have to be unique.
.. warning:: this does not handle unhashable scalars. Two things
mitigate that limitation:
1) There shouldn't be any unhashable scalars specified in the yaml
2) Our only choice would be to return an error anyway.
"""
# Any container is unhashable if it contains unhashable items (for
# instance, tuple() is a Hashable subclass but if it contains a dict, it
# cannot be hashed)
if isinstance(params, collections.Container) and not isinstance(params, (text_type, binary_type)):
if isinstance(params, collections.Mapping):
try:
# Optimistically hope the contents are all hashable
new_params = frozenset(params.items())
except TypeError:
new_params = set()
for k, v in params.items():
# Hash each entry individually
new_params.update((k, hash_params(v)))
new_params = frozenset(new_params)
elif isinstance(params, (collections.Set, collections.Sequence)):
try:
# Optimistically hope the contents are all hashable
new_params = frozenset(params)
except TypeError:
new_params = set()
for v in params:
# Hash each entry individually
new_params.update(hash_params(v))
new_params = frozenset(new_params)
else:
# This is just a guess.
new_params = frozenset(params)
return new_params
# Note: We do not handle unhashable scalars but our only choice would be
# to raise an error there anyway.
return frozenset((params,))
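# Illustrative usage (not part of the original module): with hashable values the
# parameter dict collapses directly to a frozenset of its items, which is what
# play.ROLE_CACHE uses as a dictionary key further down in Role.load():
#
#   hash_params({'when': 'a == 1'}) == frozenset([('when', 'a == 1')])   # True
#
# Unhashable values (lists, nested dicts) are converted recursively by the
# branches above, so equal parameter sets always produce equal cache keys.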
class Role(Base, Become, Conditional, Taggable):
_delegate_to = FieldAttribute(isa='string')
_delegate_facts = FieldAttribute(isa='bool', default=False)
def __init__(self, play=None, from_files=None):
self._role_name = None
self._role_path = None
self._role_params = dict()
self._loader = None
self._metadata = None
self._play = play
self._parents = []
self._dependencies = []
self._task_blocks = []
self._handler_blocks = []
self._default_vars = dict()
self._role_vars = dict()
self._had_task_run = dict()
self._completed = dict()
if from_files is None:
from_files = {}
self._from_files = from_files
super(Role, self).__init__()
def __repr__(self):
return self.get_name()
def get_name(self):
return self._role_name
@staticmethod
def load(role_include, play, parent_role=None, from_files=None):
if from_files is None:
from_files = {}
try:
# The ROLE_CACHE is a dictionary of role names, with each entry
# containing another dictionary corresponding to a set of parameters
# specified for a role as the key and the Role() object itself.
# We use frozenset to make the dictionary hashable.
params = role_include.get_role_params()
if role_include.when is not None:
params['when'] = role_include.when
if role_include.tags is not None:
params['tags'] = role_include.tags
if from_files is not None:
params['from_files'] = from_files
if role_include.vars:
params['vars'] = role_include.vars
hashed_params = hash_params(params)
if role_include.role in play.ROLE_CACHE:
for (entry, role_obj) in iteritems(play.ROLE_CACHE[role_include.role]):
if hashed_params == entry:
if parent_role:
role_obj.add_parent(parent_role)
return role_obj
r = Role(play=play, from_files=from_files)
r._load_role_data(role_include, parent_role=parent_role)
if role_include.role not in play.ROLE_CACHE:
play.ROLE_CACHE[role_include.role] = dict()
play.ROLE_CACHE[role_include.role][hashed_params] = r
return r
except RuntimeError:
raise AnsibleError("A recursion loop was detected with the roles specified. Make sure child roles do not have dependencies on parent roles",
obj=role_include._ds)
def _load_role_data(self, role_include, parent_role=None):
self._role_name = role_include.role
self._role_path = role_include.get_role_path()
self._role_params = role_include.get_role_params()
self._variable_manager = role_include.get_variable_manager()
self._loader = role_include.get_loader()
if parent_role:
self.add_parent(parent_role)
# copy over all field attributes, except for when and tags, which
# are special cases and need to preserve pre-existing values
for (attr_name, _) in iteritems(self._valid_attrs):
if attr_name not in ('when', 'tags'):
setattr(self, attr_name, getattr(role_include, attr_name))
current_when = getattr(self, 'when')[:]
current_when.extend(role_include.when)
setattr(self, 'when', current_when)
current_tags = getattr(self, 'tags')[:]
current_tags.extend(role_include.tags)
setattr(self, 'tags', current_tags)
# dynamically load any plugins from the role directory
for name, obj in get_all_plugin_loaders():
if obj.subdir:
plugin_path = os.path.join(self._role_path, obj.subdir)
if os.path.isdir(plugin_path):
obj.add_directory(plugin_path)
# load the role's other files, if they exist
metadata = self._load_role_yaml('meta')
if metadata:
self._metadata = RoleMetadata.load(metadata, owner=self, variable_manager=self._variable_manager, loader=self._loader)
self._dependencies = self._load_dependencies()
else:
self._metadata = RoleMetadata()
task_data = self._load_role_yaml('tasks', main=self._from_files.get('tasks'))
if task_data:
try:
self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader, variable_manager=self._variable_manager)
except AssertionError as e:
raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name,
obj=task_data, orig_exc=e)
handler_data = self._load_role_yaml('handlers')
if handler_data:
try:
self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True, loader=self._loader,
variable_manager=self._variable_manager)
except AssertionError as e:
raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name,
obj=handler_data, orig_exc=e)
# vars and default vars are regular dictionaries
self._role_vars = self._load_role_yaml('vars', main=self._from_files.get('vars'), allow_dir=True)
if self._role_vars is None:
self._role_vars = dict()
elif not isinstance(self._role_vars, dict):
raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)
self._default_vars = self._load_role_yaml('defaults', main=self._from_files.get('defaults'), allow_dir=True)
if self._default_vars is None:
self._default_vars = dict()
elif not isinstance(self._default_vars, dict):
raise AnsibleParserError("The defaults/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)
def _load_role_yaml(self, subdir, main=None, allow_dir=False):
file_path = os.path.join(self._role_path, subdir)
if self._loader.path_exists(file_path) and self._loader.is_directory(file_path):
# Valid extensions and ordering for roles is hard-coded to maintain
# role portability
extensions = ['.yml', '.yaml', '.json']
# If no <main> is specified by the user, look for files with
# extensions before bare name. Otherwise, look for bare name first.
if main is None:
_main = 'main'
extensions.append('')
else:
_main = main
extensions.insert(0, '')
found_files = self._loader.find_vars_files(file_path, _main, extensions, allow_dir)
if found_files:
data = {}
for found in found_files:
new_data = self._loader.load_from_file(found)
if new_data and allow_dir:
data = combine_vars(data, new_data)
else:
data = new_data
return data
elif main is not None:
raise AnsibleParserError("Could not find specified file in role: %s/%s" % (subdir, main))
return None
def _load_dependencies(self):
'''
Recursively loads role dependencies from the metadata list of
dependencies, if it exists
'''
deps = []
if self._metadata:
for role_include in self._metadata.dependencies:
r = Role.load(role_include, play=self._play, parent_role=self)
deps.append(r)
return deps
# other functions
def add_parent(self, parent_role):
''' adds a role to the list of this roles parents '''
if not isinstance(parent_role, Role):
raise AnsibleAssertionError()
if parent_role not in self._parents:
self._parents.append(parent_role)
def get_parents(self):
return self._parents
def get_default_vars(self, dep_chain=None):
dep_chain = [] if dep_chain is None else dep_chain
default_vars = dict()
for dep in self.get_all_dependencies():
default_vars = combine_vars(default_vars, dep.get_default_vars())
if dep_chain:
for parent in dep_chain:
default_vars = combine_vars(default_vars, parent._default_vars)
default_vars = combine_vars(default_vars, self._default_vars)
return default_vars
def get_inherited_vars(self, dep_chain=None):
dep_chain = [] if dep_chain is None else dep_chain
inherited_vars = dict()
if dep_chain:
for parent in dep_chain:
inherited_vars = combine_vars(inherited_vars, parent._role_vars)
return inherited_vars
def get_role_params(self, dep_chain=None):
dep_chain = [] if dep_chain is None else dep_chain
params = {}
if dep_chain:
for parent in dep_chain:
params = combine_vars(params, parent._role_params)
params = combine_vars(params, self._role_params)
return params
def get_vars(self, dep_chain=None, include_params=True):
dep_chain = [] if dep_chain is None else dep_chain
all_vars = self.get_inherited_vars(dep_chain)
for dep in self.get_all_dependencies():
all_vars = combine_vars(all_vars, dep.get_vars(include_params=include_params))
all_vars = combine_vars(all_vars, self.vars)
all_vars = combine_vars(all_vars, self._role_vars)
if include_params:
all_vars = combine_vars(all_vars, self.get_role_params(dep_chain=dep_chain))
return all_vars
def get_direct_dependencies(self):
return self._dependencies[:]
def get_all_dependencies(self):
'''
Returns a list of all deps, built recursively from all child dependencies,
in the proper order in which they should be executed or evaluated.
'''
child_deps = []
for dep in self.get_direct_dependencies():
for child_dep in dep.get_all_dependencies():
child_deps.append(child_dep)
child_deps.append(dep)
return child_deps
def get_task_blocks(self):
return self._task_blocks[:]
def get_handler_blocks(self, play, dep_chain=None):
block_list = []
# update the dependency chain here
if dep_chain is None:
dep_chain = []
new_dep_chain = dep_chain + [self]
for dep in self.get_direct_dependencies():
dep_blocks = dep.get_handler_blocks(play=play, dep_chain=new_dep_chain)
block_list.extend(dep_blocks)
for task_block in self._handler_blocks:
new_task_block = task_block.copy()
new_task_block._dep_chain = new_dep_chain
new_task_block._play = play
block_list.append(new_task_block)
return block_list
def has_run(self, host):
'''
Returns true if this role has been iterated over completely and
at least one task was run
'''
return host.name in self._completed and not self._metadata.allow_duplicates
def compile(self, play, dep_chain=None):
'''
Returns the task list for this role, which is created by first
recursively compiling the tasks for all direct dependencies, and
then adding on the tasks for this role.
The role compile() also remembers and saves the dependency chain
with each task, so tasks know by which route they were found, and
can correctly take their parent's tags/conditionals into account.
'''
block_list = []
# update the dependency chain here
if dep_chain is None:
dep_chain = []
new_dep_chain = dep_chain + [self]
deps = self.get_direct_dependencies()
for dep in deps:
dep_blocks = dep.compile(play=play, dep_chain=new_dep_chain)
block_list.extend(dep_blocks)
for idx, task_block in enumerate(self._task_blocks):
new_task_block = task_block.copy()
new_task_block._dep_chain = new_dep_chain
new_task_block._play = play
if idx == len(self._task_blocks) - 1:
new_task_block._eor = True
block_list.append(new_task_block)
return block_list
def serialize(self, include_deps=True):
res = super(Role, self).serialize()
res['_role_name'] = self._role_name
res['_role_path'] = self._role_path
res['_role_vars'] = self._role_vars
res['_role_params'] = self._role_params
res['_default_vars'] = self._default_vars
res['_had_task_run'] = self._had_task_run.copy()
res['_completed'] = self._completed.copy()
if self._metadata:
res['_metadata'] = self._metadata.serialize()
if include_deps:
deps = []
for role in self.get_direct_dependencies():
deps.append(role.serialize())
res['_dependencies'] = deps
parents = []
for parent in self._parents:
parents.append(parent.serialize(include_deps=False))
res['_parents'] = parents
return res
def deserialize(self, data, include_deps=True):
self._role_name = data.get('_role_name', '')
self._role_path = data.get('_role_path', '')
self._role_vars = data.get('_role_vars', dict())
self._role_params = data.get('_role_params', dict())
self._default_vars = data.get('_default_vars', dict())
self._had_task_run = data.get('_had_task_run', dict())
self._completed = data.get('_completed', dict())
if include_deps:
deps = []
for dep in data.get('_dependencies', []):
r = Role()
r.deserialize(dep)
deps.append(r)
setattr(self, '_dependencies', deps)
parent_data = data.get('_parents', [])
parents = []
for parent in parent_data:
r = Role()
r.deserialize(parent, include_deps=False)
parents.append(r)
setattr(self, '_parents', parents)
metadata_data = data.get('_metadata')
if metadata_data:
m = RoleMetadata()
m.deserialize(metadata_data)
self._metadata = m
super(Role, self).deserialize(data)
def set_loader(self, loader):
self._loader = loader
for parent in self._parents:
parent.set_loader(loader)
for dep in self.get_direct_dependencies():
dep.set_loader(loader)
| gpl-3.0 | -2,407,202,554,963,732,500 | 37.837782 | 156 | 0.596437 | false | 4.219991 | false | false | false |
shaih/HElib | utils/tests/diff-threshold.py | 1 | 3144 | #!/usr/bin/env python3
# Copyright (C) 2020 IBM Corp.
# This program is Licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
import argparse
import sys
import ast
import math
def diff_float(na, nb, threshold):
for a, b in zip(na, nb):
if not math.isclose(a, b, abs_tol=threshold):
raise ValueError(f"Difference {a - b} between {a} and {b} "
f"exceeds threshold {threshold}.")
def makeSameSize(a, b, max_length):
lenA, lenB = len(a), len(b)
if lenA > max_length or lenB > max_length:
raise ValueError(f"Size of slots for {a}({lenA}) {b}({lenB}) "
f"> {max_length}.")
if lenA == lenB:
return a, b
else:
maxSz = max(lenA, lenB)
a += [0] * (maxSz - lenA)
b += [0] * (maxSz - lenB)
return (a, b)
def parseCorrectly(la, lb, decrypt):
error_msg = "Type mismatch. {0}({1}) and {2}({3}) type do not match."
if decrypt:
for a, b in zip(la, lb):
a, b = ast.literal_eval(a), ast.literal_eval(b)
if type(a) is not type(b):
raise TypeError(error_msg.format(a, type(a), b, type(b)))
yield a, b
else:
for a, b in zip(la, lb):
a = [[ float(i) for i in a.split(",") ]]
b = [[ float(i) for i in b.split(",") ]]
if type(a) is not type(b):
raise TypeError(error_msg.format(a, type(a), b, type(b)))
yield a, b
def main():
parser = argparse.ArgumentParser()
parser.add_argument("firstfile", help="first data file", type=str)
parser.add_argument("secondfile", help="second data file", type=str)
parser.add_argument("--decrypt", help="diff decrypt format (instead of decode)",
action='store_true')
parser.add_argument("--threshold", help="error threshold [default=0.001]",
type=float, default=0.001)
args = parser.parse_args()
with open(args.firstfile, 'r') as f1, open(args.secondfile, 'r') as f2:
l1, l2 = list(f1), list(f2)
if len(l1) != len(l2):
sys.exit(f"Different number of lines. "
f"First contains {len(l1)} second contains {len(l2)}.")
if l1[0] != l2[0]:
sys.exit(f"File headers differ. {l1[0]} {l2[0]}.")
try:
for a, b in parseCorrectly(l1[1:], l2[1:], args.decrypt):
for sa, sb in zip(a, b):
sa, sb = makeSameSize(sa, sb, 2)
diff_float(sa, sb, args.threshold)
except (TypeError, ValueError) as e:
sys.exit(str(e))
if __name__ == "__main__":
main()
| apache-2.0 | -4,331,664,892,367,646,000 | 36.428571 | 84 | 0.57729 | false | 3.35539 | false | false | false |
Videonauth/passgen | tool/keyboard_list_generator.py | 1 | 2612 | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
############################################################################
#
# keyboard_list_generator.py
#
############################################################################
#
# Author: Videonauth <videonauth@googlemail.com>
# Date: 09.07.2016
# Purpose:
# Generate a word-list for the keyboard sequence check.
# Written for: Python 3.5.1
#
############################################################################
de_lowercase = "qwertzuiopü+asdfghjkllöä#yxcvbnm,.-"
de_uppercase = "°!§$%&/()=?WERTZUIOPÜ*ASDFGHJKLÖÄ'YXCVBNM;:_"
en_lowercase = "-=qwertyuiop[]asdfghjkl;'zxcvbnm,./"
en_uppercase = "~!@#$%^&*()_QWERTYUIOP{}ASDFGHJKL:ZXCVBNM<>?"
# next line might error out if destination file does not exist
with open('../lists/keyboard.wl', 'r+') as file:
for a in range(3, len(de_lowercase) + 1):
for b in range(len(de_lowercase)):
if len(de_lowercase[b: b + a]) == a:
file.write(de_lowercase[b: b + a] + '\n')
for a in range(3, len(de_uppercase) + 1):
for b in range(len(de_uppercase)):
if len(de_uppercase[b: b + a]) == a:
file.write(de_uppercase[b: b + a] + '\n')
for a in range(3, len(en_lowercase) + 1):
for b in range(len(en_lowercase)):
if len(en_lowercase[b: b + a]) == a:
file.write(en_lowercase[b: b + a] + '\n')
for a in range(3, len(en_uppercase) + 1):
for b in range(len(en_uppercase)):
if len(en_uppercase[b: b + a]) == a:
file.write(en_uppercase[b: b + a] + '\n')
de_lowercasere = de_lowercase[:: -1]
de_uppercasere = de_uppercase[:: -1]
en_lowercasere = en_lowercase[:: -1]
en_uppercasere = en_uppercase[:: -1]
for a in range(3, len(de_lowercasere) + 1):
for b in range(len(de_lowercasere)):
if len(de_lowercasere[b: b + a]) == a:
file.write(de_lowercasere[b: b + a] + '\n')
for a in range(3, len(de_uppercasere) + 1):
for b in range(len(de_uppercasere)):
if len(de_uppercasere[b: b + a]) == a:
file.write(de_uppercasere[b: b + a] + '\n')
for a in range(3, len(en_lowercasere) + 1):
for b in range(len(en_lowercasere)):
if len(en_lowercasere[b: b + a]) == a:
file.write(en_lowercasere[b: b + a] + '\n')
for a in range(3, len(en_uppercasere) + 1):
for b in range(len(en_uppercasere)):
if len(en_uppercasere[b: b + a]) == a:
file.write(en_uppercasere[b: b + a] + '\n')
file.close()
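# Example of the generated sequences (illustrative): the English lowercase row
# "-=qwertyuiop[]asdfghjkl;'zxcvbnm,./" contributes every run of 3 or more
# adjacent keys, e.g. "-=q", "=qw", "qwe", ..., while the reversed rows handled
# at the end add the same runs typed backwards, e.g. "ewq".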
| mit | 7,557,594,228,450,199,000 | 41.688525 | 76 | 0.504224 | false | 3.059929 | false | false | false |
CNR-ISMAR/rectifiedgrid | rectifiedgrid/hillshade.py | 1 | 1108 | from matplotlib.colors import LightSource
# Adapted from https://github.com/jobar8/graphics
def alpha_blend(rgb, intensity, alpha=0.7):
return alpha * rgb + (1 - alpha) * intensity
def get_hs(data,
cmap,
norm=None,
zf=10,
azdeg=315,
altdeg=45,
dx=1,
dy=1,
fraction=1.5,
blend_mode='alpha',
alpha=0.7,
**kwargs_norm):
ls = LightSource(azdeg, altdeg)
if blend_mode == 'alpha':
# transparency blending
rgb = ls.shade(data, cmap=cmap,
norm=norm,
blend_mode=alpha_blend, vert_exag=zf, dx=dx, dy=dy,
fraction=fraction, alpha=alpha, **kwargs_norm)
else:
rgb = ls.shade(data,
cmap=cmap,
norm=norm,
blend_mode=blend_mode,
vert_exag=zf,
dx=dx,
dy=dy,
fraction=fraction,
**kwargs_norm)
return rgb
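# Minimal usage sketch (not part of the original module): shade a made-up
# surface with matplotlib's 'terrain' colormap; every value here is illustrative.
if __name__ == '__main__':
    import numpy as np
    from matplotlib import cm
    dem = np.random.rand(50, 50)
    rgba = get_hs(dem, cmap=cm.terrain, zf=5, blend_mode='alpha')
    print(rgba.shape)  # (50, 50, 4) RGBA array ready for imshow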
| gpl-3.0 | 3,936,537,537,660,941,000 | 28.157895 | 74 | 0.447653 | false | 3.971326 | false | false | false |
HyechurnJang/pygics | sample/simple_database.py | 1 | 1764 | # -*- coding: utf-8 -*-
'''
____ ___ ____________ ___ ___ ____ _________________
/ __ \/ _ | / __/ _/ __/ / _ \/ _ \/ __ \__ / / __/ ___/_ __/
/ /_/ / __ |_\ \_/ /_\ \ / ___/ , _/ /_/ / // / _// /__ / /
\____/_/ |_/___/___/___/ /_/ /_/|_|\____/\___/___/\___/ /_/
Operational Aid Source for Infra-Structure
Created on 2020. 3. 18..
@author: Hye-Churn Jang, CMBU Specialist in Korea, VMware [jangh@vmware.com]
'''
#===============================================================================
# Prepare PostgreSQL Server
#===============================================================================
# docker run --name postgres -p 5432:5432 -e POSTGRES_PASSWORD=password -e POSTGRES_USER=pygics -e POSTGRES_DB=pygicsdb -d postgres
from pygics import load, logInfo
load('modules.postgres')
# Login Database
SDK.PygicsDB.system('localhost:5432', 'pygics', 'password')
# "User" Table at "PygicsDB" Database
User = SDK.PygicsDB.User
logInfo('Create Users')
with SDK.PygicsDB: # Open Transaction for Create Records
User('Tony', 'Tony Stark', 'IronMan')
User('Peter', 'Peter Parker', 'SpiderMan')
User('Peter', 'Peter Pan', 'Elf')
logInfo('Get All Users\n{}'.format(User.list()))
# query form based SQLAlchemy
logInfo('Find All Peters\n{}'.format(User.list(User.name == 'Peter', order='id')))
with SDK.PygicsDB: # Open Transaction
tony = User.list(User.name == 'Tony')[0]
tony.nickname = 'Avengers Leader' # Update Data
tony.update()
logInfo('Check Tony Changed\n{}'.format(User.list(User.name == 'Tony')))
logInfo('Delete All Users')
with SDK.PygicsDB: # Open Transaction for Delete
for user in User.list():
user.delete()
logInfo('Check Users Empty\n{}'.format(User.list()))
| apache-2.0 | -6,248,814,847,715,503,000 | 34.28 | 131 | 0.521542 | false | 3.005111 | false | false | false |
markvdw/GParML | scg_adapted_local_MapReduce.py | 2 | 8715 | '''
A bunch of support functions used for SCG optimisation. They depend on the
parallel implementation framework, but may change for other optimisers.
'''
import glob
import time
import numpy
from os.path import splitext
from local_MapReduce import load, save
time_acc = {
'embeddings_set_grads' : [],
'embeddings_get_grads_mu' : [],
'embeddings_get_grads_kappa' : [],
'embeddings_get_grads_theta' : [],
'embeddings_get_grads_current_grad' : [],
'embeddings_get_grads_gamma' : [],
'embeddings_get_grads_max_d' : [],
'embeddings_set_grads_reset_d' : [],
'embeddings_set_grads_update_d' : [],
'embeddings_set_grads_update_X' : [],
'embeddings_set_grads_update_grad_old' : [],
'embeddings_set_grads_update_grad_new' : [],
}
'''
Initialisation for local statistics
'''
def embeddings_set_grads(folder):
'''
Sets the grads and other local statistics often needed for optimisation locally for
each node. This is currently only implemented locally, but could easly be adapted
to the MapReduce framework to be done on remote nodes in parallel. There's no real
need to do this in parallel though, as the computaions taking place are not that
time consuming.
'''
global time_acc
start = time.time()
input_files = sorted(glob.glob(folder + '/*.grad_latest.npy'))
for file_name in input_files:
grads = load(file_name)
#print 'grads'
#print grads
# Save grad new as the latest grad evaluated
new_file = splitext(splitext(file_name)[0])[0] + '.grad_new.npy'
save(new_file, grads)
# Init the old grad to be grad new
new_file = splitext(splitext(file_name)[0])[0] + '.grad_old.npy'
save(new_file, grads)
# Save the direction as the negative grad
new_file = splitext(splitext(file_name)[0])[0] + '.grad_d.npy'
save(new_file, -1 * grads)
end = time.time()
time_acc['embeddings_set_grads'] += [end - start]
'''
Getters for local statistics
'''
def embeddings_get_grads_mu(folder):
'''
Get the sum over the inputs of the inner product of the direction and grad_new
'''
global time_acc
start = time.time()
mu = 0
grad_new_files = sorted(glob.glob(folder + '/*.grad_new.npy'))
grad_d_files = sorted(glob.glob(folder + '/*.grad_d.npy'))
for grad_new_file, grad_d_file in zip(grad_new_files, grad_d_files):
grad_new = load(grad_new_file)
grad_d = load(grad_d_file)
mu += (grad_new * grad_d).sum()
end = time.time()
time_acc['embeddings_get_grads_mu'] += [end - start]
return mu
def embeddings_get_grads_kappa(folder):
'''
Get the sum over the inputs of the inner product of the direction with itself
'''
global time_acc
start = time.time()
kappa = 0
grad_d_files = sorted(glob.glob(folder + '/*.grad_d.npy'))
for grad_d_file in grad_d_files:
grad_d = load(grad_d_file)
kappa += (grad_d * grad_d).sum()
end = time.time()
time_acc['embeddings_get_grads_kappa'] += [end - start]
return kappa
def embeddings_get_grads_theta(folder):
'''
Get the sum over the inputs of the inner product of the direction and grad_latest
'''
global time_acc
start = time.time()
theta = 0
grad_new_files = sorted(glob.glob(folder + '/*.grad_new.npy'))
grad_latest_files = sorted(glob.glob(folder + '/*.grad_latest.npy'))
grad_d_files = sorted(glob.glob(folder + '/*.grad_d.npy'))
for grad_latest_file, grad_d_file, grad_new_file in zip(grad_latest_files, grad_d_files, grad_new_files):
grad_latest = load(grad_latest_file)
grad_new = load(grad_new_file)
grad_d = load(grad_d_file)
theta += (grad_d * (grad_latest - grad_new)).sum()
end = time.time()
time_acc['embeddings_get_grads_theta'] += [end - start]
return theta
def embeddings_get_grads_current_grad(folder):
'''
Get the sum over the inputs of the inner product of grad_new with itself
'''
global time_acc
start = time.time()
current_grad = 0
grad_new_files = sorted(glob.glob(folder + '/*.grad_new.npy'))
for grad_new_file in grad_new_files:
grad_new = load(grad_new_file)
current_grad += (grad_new * grad_new).sum()
end = time.time()
time_acc['embeddings_get_grads_current_grad'] += [end - start]
return current_grad
def embeddings_get_grads_gamma(folder):
'''
Get the sum over the inputs of the inner product of grad_old and grad_new
'''
global time_acc
start = time.time()
gamma = 0
grad_new_files = sorted(glob.glob(folder + '/*.grad_new.npy'))
grad_old_files = sorted(glob.glob(folder + '/*.grad_old.npy'))
for grad_new_file, grad_old_file in zip(grad_new_files, grad_old_files):
grad_new = load(grad_new_file)
grad_old = load(grad_old_file)
gamma += (grad_new * grad_old).sum()
end = time.time()
time_acc['embeddings_get_grads_gamma'] += [end - start]
return gamma
def embeddings_get_grads_max_d(folder, alpha):
'''
Get the max abs element of the direction over all input files
'''
global time_acc
start = time.time()
max_d = 0
grad_d_files = sorted(glob.glob(folder + '/*.grad_d.npy'))
for grad_d_file in grad_d_files:
grad_d = load(grad_d_file)
max_d = max(max_d, numpy.max(numpy.abs(alpha * grad_d)))
end = time.time()
time_acc['embeddings_get_grads_max_d'] += [end - start]
return max_d
'''
Setters for local statistics
'''
def embeddings_set_grads_reset_d(folder):
'''
Reset the direction to be the negative of grad_new
'''
global time_acc
start = time.time()
input_files = sorted(glob.glob(folder + '/*.grad_new.npy'))
for file_name in input_files:
grads = load(file_name)
# Save the direction as the negative grad
new_file = splitext(splitext(file_name)[0])[0] + '.grad_d.npy'
save(new_file, -1 * grads)
end = time.time()
time_acc['embeddings_set_grads_reset_d'] += [end - start]
def embeddings_set_grads_update_d(folder, gamma):
'''
Update the value of the direction for each input to be gamma (given) times the old direction
minus grad_new
'''
global time_acc
start = time.time()
grad_new_files = sorted(glob.glob(folder + '/*.grad_new.npy'))
grad_d_files = sorted(glob.glob(folder + '/*.grad_d.npy'))
for grad_new_file, grad_d_file in zip(grad_new_files, grad_d_files):
grad_new = load(grad_new_file)
grad_d = load(grad_d_file)
save(grad_d_file, gamma * grad_d - grad_new)
end = time.time()
time_acc['embeddings_set_grads_update_d'] += [end - start]
def embeddings_set_grads_update_X(folder, alpha):
'''
Update the value of the local embeddings and variances themselves to be X + alpha * direction
'''
global time_acc
start = time.time()
grad_d_files = sorted(glob.glob(folder + '/*.grad_d.npy'))
X_mu_files = sorted(glob.glob(folder + '/*.embedding.npy'))
X_S_files = sorted(glob.glob(folder + '/*.variance.npy'))
for grad_d_file, X_mu_file, X_S_file in zip(grad_d_files, X_mu_files, X_S_files):
grad_d = load(grad_d_file)
grad_d_X_mu = grad_d[0]
grad_d_X_S = grad_d[1]
X_mu = load(X_mu_file)
X_S = load(X_S_file)
#print 'X_mu'
#print X_mu
#print 'X_S'
#print X_S
save(X_mu_file, X_mu + alpha * grad_d_X_mu)
save(X_S_file, X_S + alpha * grad_d_X_S)
end = time.time()
time_acc['embeddings_set_grads_update_X'] += [end - start]
def embeddings_set_grads_update_grad_old(folder):
'''
Set grad_old to be grad_new
'''
global time_acc
start = time.time()
input_files = sorted(glob.glob(folder + '/*.grad_new.npy'))
for file_name in input_files:
grads = load(file_name)
# Save grad old as latest grad new
new_file = splitext(splitext(file_name)[0])[0] + '.grad_old.npy'
save(new_file, grads)
end = time.time()
time_acc['embeddings_set_grads_update_grad_old'] += [end - start]
def embeddings_set_grads_update_grad_new(folder):
'''
Set grad_new to be grad_latest (a temp grad that keeps changing every evaluation)
'''
global time_acc
start = time.time()
input_files = sorted(glob.glob(folder + '/*.grad_latest.npy'))
for file_name in input_files:
grads = load(file_name)
# Save grad old as latest grad new
new_file = splitext(splitext(file_name)[0])[0] + '.grad_new.npy'
save(new_file, grads)
end = time.time()
time_acc['embeddings_set_grads_update_grad_new'] += [end - start]
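# Illustrative call sequence (a sketch, not part of the original module): a
# single SCG-style iteration over the statistics folder combines the helpers
# above roughly as follows; 'folder', 'alpha' and 'gamma' are placeholders
# supplied by the optimiser, and the exact SCG formulas live in the optimiser.
#
# embeddings_set_grads(folder)                   # initialise d, grad_old, grad_new
# mu = embeddings_get_grads_mu(folder)           # sum over inputs of d . grad_new
# kappa = embeddings_get_grads_kappa(folder)     # sum over inputs of d . d
# embeddings_set_grads_update_X(folder, alpha)   # X <- X + alpha * d
# embeddings_set_grads_update_grad_old(folder)   # grad_old <- grad_new
# embeddings_set_grads_update_grad_new(folder)   # grad_new <- grad_latest
# embeddings_set_grads_update_d(folder, gamma)   # d <- gamma * d - grad_new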
| bsd-3-clause | -4,492,889,811,067,905,000 | 34.864198 | 109 | 0.618589 | false | 3.226583 | true | false | false |
KarolBedkowski/photomagic | photomagick/filters/bw.py | 1 | 1431 | #!usr/bin/python
# -*- coding: utf-8 -*-
__plugins__ = ('BwLuminosity', 'BwGreen', 'BwOrange', 'BwRed', 'BwYellow',
'BwInfrared')
__version__ = '2011-03-20'
__author__ = 'Karol Będkowski'
__copyright__ = "Copyright (c) Karol Będkowski, 2011"
import ImageOps
from photomagick.common import colors
from photomagick.common.base_filter import BaseFilter
from photomagick.common.const import CATEGORY_BASE
class BwLuminosity(BaseFilter):
STEPS = 3
NAME = _("BW Luminosity")
CATEGORY = CATEGORY_BASE
def process(self, image):
yield 'Start...', image
image = colors.convert_to_luminosity(image)
yield 'Contrast...', image
image = ImageOps.autocontrast(image)
yield 'Done', image
class _BwFilter(BaseFilter):
STEPS = 3
NAME = 'BW Filter'
CATEGORY = CATEGORY_BASE
_COLOR = (1, 1, 1)
def process(self, image):
yield 'Start...', image
image = colors.color_mixer_monochrome(image, *self._COLOR)
yield 'Contrast...', image
image = ImageOps.autocontrast(image)
yield 'Done', image
class BwGreen(_BwFilter):
NAME = _('BW Green Filter')
_COLOR = 0.04, 0.27, 0.08
class BwOrange(_BwFilter):
NAME = _('BW Orange Filter')
_COLOR = (0.31, 0.09, 0)
class BwRed(_BwFilter):
NAME = _('BW Red Filter')
_COLOR = (0.35, 0.04, 0)
class BwYellow(_BwFilter):
NAME = _('BW Yellow Filter')
_COLOR = (0.24, 0.11, 0.05)
class BwInfrared(_BwFilter):
NAME = _('BW Infrared')
_COLOR = (0.15, 1.15, -0.30)
| gpl-2.0 | -3,300,862,538,246,330,000 | 20.651515 | 74 | 0.664801 | false | 2.593466 | false | false | false |
ReedAnders/deepmap | deepmap/nn.py | 1 | 5374 | # Copyright (C) 2016 Reed Anderson.
# From: https://github.com/ReedAnders/deepmap
# License: MIT BY https://opensource.org/licenses/MIT
import pickle, os, binascii
from collections import deque
import numpy as np
from math import exp
from random import random
class NodeMap:
def __init__(self, input_node_population=12, output_node_population=1, latent_node_population=400):
self.coordinate_map = []
self.input_nodes = [InputNode() for node in range(input_node_population)]
self.output_nodes = [OutputNode() for node in range(output_node_population)]
self.latent_nodes = [LatentNode() for node in range(latent_node_population)]
self.all_nodes = self.input_nodes + self.output_nodes + self.latent_nodes
def construct_map(self):
for node in self.all_nodes:
self.coordinate_map.append((node.name, node.coordinates))
for node in self.all_nodes:
node.find_neighbors(self.coordinate_map)
self.update_input_values()
# pickle.dump( self.coordinate_map, open( "pickles/coordinate_map.p", "wb" ) )
# pickle.dump( self.input_nodes, open( "pickles/input_nodes.p", "wb" ) )
# pickle.dump( self.output_nodes, open( "pickles/output_nodes.p", "wb" ) )
# pickle.dump( self.latent_nodes, open( "pickles/latent_nodes.p", "wb" ) )
def calculate_dimensions(self):
n_params = 0
for node in self.all_nodes:
n_params += 2
n_params += len(node.true_neighbor_index)
return n_params
def error(self, correct_labels, predicted_labels):
error = None
pattern_error = []
n_training_patterns = len(correct_labels)
for i in range(n_training_patterns):
_sum = sum([(y-o)**2 for y,o in zip(correct_labels, predicted_labels)])
pattern_error.append(_sum)
error = 1.0/n_training_patterns * sum(pattern_error)
return error
def train(self, training_patterns, param):
n_training_patterns = len(training_patterns)
for i in training_patterns:
n_labels = len(self.output_nodes)
inputs = i[:-n_labels]
c_labels = i[-n_labels:]
p_labels = self.evaluate_topology(inputs, param)
error = self.error(c_labels, p_labels)
fitness = 1 - error
print 'ERROR: %r' % (error)
return error, fitness
def evaluate_topology(self, data, param):
p_labels = []
for index, node in enumerate(self.input_nodes):
node.value = float(data[index])
# Trim parameters
p_len = len(param)
t_len = len(self.latent_nodes + self.output_nodes) * 2
w_len = p_len - t_len
w_para = param[:w_len]
# t_para = deque(param[w_len-2:])
# Evaluate function
for node in self.latent_nodes + self.output_nodes:
self.evaluate_weights(w_para)
t_para = deque(param[w_len-2:])
# for node in self.latent_nodes + self.output_nodes:
# node_topo_params = [t_para.popleft() for _i in range(2)]
# node.eval_neighbors(node_topo_params[0],node_topo_params[1])
# Return predicted labels
p_labels = [node.value for node in self.output_nodes]
return p_labels
def evaluate_weights(self, param):
w_para = deque(param)
for node in self.latent_nodes + self.output_nodes:
neighbors = len(node.true_neighbor_index)
node_weight_params = [w_para.popleft() for _i in range(neighbors)]
node.eval_sigmoid(node_weight_params)
self.update_input_values()
def update_input_values(self):
for node in self.output_nodes + self.latent_nodes:
for index in node.true_neighbor_index:
node.input_values.append(self.all_nodes[index].value)
class Node:
def __init__(self, dimensions=3):
self.name = binascii.b2a_hex(os.urandom(8))
self.coordinates = np.array([random() for i in range(dimensions)])
self.neighbors = []
self.true_neighbor_index = []
self.optimal_neighbor_set = set()
self.value = 0.0
def find_neighbors(self, coordinate_map):
for index, node in enumerate(coordinate_map):
if np.linalg.norm(self.coordinates-node[1]) < 0.3:
self.true_neighbor_index.append(index)
self.neighbors.append((node,True))
else:
self.neighbors.append((node,False))
# Two parameters between -1, 1
def eval_neighbors(self, lower_bound, upper_bound):
for index in self.true_neighbor_index:
dist = np.linalg.norm(self.coordinates-self.neighbors[index][0][1])
if dist > lower_bound and dist < upper_bound:
self.optimal_neighbor_set.add(index)
class InputNode(Node):
def __init__(self):
Node.__init__(self)
class LatentNode(Node):
def __init__(self):
Node.__init__(self)
self.value = random()
self.input_values = []
# Multiple parameters for n weights -1, 1
def eval_sigmoid(self, weights):
x = sum([w*v for w,v in zip(weights, self.input_values)])
self.value = 1 / (1 + exp(-x))
class OutputNode(LatentNode):
def __init__(self):
LatentNode.__init__(self)
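# Minimal usage sketch (not part of the original file): build a small map and
# evaluate one random parameter vector; the single training pattern is made up
# (four input values followed by one label).
if __name__ == '__main__':
    node_map = NodeMap(input_node_population=4, output_node_population=1,
                       latent_node_population=20)
    node_map.construct_map()
    params = [random() for _ in range(node_map.calculate_dimensions())]
    error, fitness = node_map.train([[0.1, 0.2, 0.3, 0.4, 1.0]], params)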
| mit | -437,573,294,550,352,260 | 32.798742 | 103 | 0.600298 | false | 3.601877 | false | false | false |
aaronst/macholibre | macholibre/dictionary.py | 1 | 12913 | #!/usr/bin/env python
"""
Copyright 2016 Aaron Stephens <aaronjst93@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# CPU Types, CPU Subtypes, Filetypes, Load Commands, Flags as defined in the
# following official Apple, inc. header files:
# /usr/include/mach/machine.h
# /usr/include/mach-o/loader.h
cert_slots = {
-1: 'root',
0: 'leaf'
}
hashes = {
0: 'No Hash',
1: 'SHA-1',
2: 'SHA-256'
}
segment_flags = {
1: 'HIGHVM',
2: 'FVMLIB',
4: 'NORELOC',
8: 'PROTECTED_VERSION_1'
}
n_types = {
0x0: 'UNDF',
0x2: 'ABS',
0xe: 'SECT',
0xc: 'PBUD',
0xa: 'INDR'
}
machos = {
4277009102: (False, False), # 32 bit, big endian
4277009103: (True, False), # 64 bit, big endian
3472551422: (False, True), # 32 bit, little endian
3489328638: (True, True) # 64 bit, little endian
}
requirements = {
1: 'HostRequirementType',
2: 'GuestRequirementType',
3: 'DesignatedRequirementType',
4: 'LibraryRequirementType',
5: 'PluginRequirementType',
}
indeces = {
0: 'CodeDirectorySlot',
1: 'InfoSlot',
2: 'RequirementsSlot',
3: 'ResourceDirSlot',
4: 'ApplicationSlot',
5: 'EntitlementSlot',
0x10000: 'SignatureSlot'
}
matches = {
0: 'matchExists',
1: 'matchEqual',
2: 'matchContains',
3: 'matchBeginsWith',
4: 'matchEndsWith',
5: 'matchLessThan',
6: 'matchGreaterThan',
7: 'matchLessEqual',
8: 'matchGreaterEqual'
}
protections = {
0b000: '---',
0b001: 'r--',
0b010: '-w-',
0b011: 'rw-',
0b100: '--x',
0b101: 'r-x',
0b110: '-wx',
0b111: 'rwx'
}
signatures = {
'REQUIREMENT': 0xfade0c00,
'REQUIREMENTS': 0xfade0c01,
'CODEDIRECTORY': 0xfade0c02,
'ENTITLEMENT': 0xfade7171,
'BLOBWRAPPER': 0xfade0b01,
'EMBEDDED_SIGNATURE': 0xfade0cc0,
'DETACHED_SIGNATURE': 0xfade0cc1,
'CODE_SIGN_DRS': 0xfade0c05
}
section_attrs = {
0x80000000: 'PURE_INSTRUCTIONS',
0x40000000: 'NO_TOC',
0x20000000: 'STRIP_STATIC_SYMS',
0x10000000: 'NO_DEAD_STRIP',
0x08000000: 'LIVE_SUPPORT',
0x04000000: 'SELF_MODIFYING_CODE',
0x02000000: 'DEBUG',
0x00000400: 'SOME_INSTRUCTIONS',
0x00000200: 'EXT_RELOC',
0x00000100: 'LOC_RELOC'
}
filetypes = {
1: 'OBJECT',
2: 'EXECUTE',
3: 'FVMLIB',
4: 'CORE',
5: 'PRELOAD',
6: 'DYLIB',
7: 'DYLINKER',
8: 'BUNDLE',
9: 'DYLIB_STUB',
10: 'DSYM',
11: 'KEXT_BUNDLE'
}
section_types = {
0x0: 'REGULAR',
0x1: 'ZEROFILL',
0x2: 'CSTRING_LITERALS',
0x3: '4BYTE_LITERALS',
0x4: '8BYTE_LITERALS',
0x5: 'LITERAL_POINTERS',
0x6: 'NON_LAZY_SYMBOL_POINTERS',
0x7: 'LAZY_SYMBOL_POINTERS',
0x8: 'SYMBOL_STUBS',
0x9: 'MOD_INIT_FUNC_POINTERS',
0xa: 'MOD_TERM_FUNC_POINTERS',
0xb: 'COALESCED',
0xc: 'GB_ZEROFILL',
0xd: 'INTERPOSING',
0xe: '16BYTE_LITERALS',
0xf: 'DTRACE_DOF',
0x10: 'LAZY_DYLIB_SYMBOL_POINTERS',
0x11: 'THREAD_LOCAL_REGULAR',
0x12: 'THREAD_LOCAL_ZEROFILL',
0x13: 'THREAD_LOCAL_VARIABLES',
0x14: 'THREAD_LOCAL_VARIABLE_POINTERS',
0x15: 'THREAD_LOCAL_INIT_FUNCTION_POINTERS'
}
operators = {
0: 'False',
1: 'True',
2: 'Ident',
3: 'AppleAnchor',
4: 'AnchorHash',
5: 'InfoKeyValue',
6: 'And',
7: 'Or',
8: 'CDHash',
9: 'Not',
10: 'InfoKeyField',
11: 'CertField',
12: 'TrustedCert',
13: 'TrustedCerts',
14: 'CertGeneric',
15: 'AppleGenericAnchor',
16: 'EntitlementField',
17: 'CertPolicy',
18: 'NamedAnchor',
19: 'NamedCode',
20: 'Platform'
}
thread_states = {
1: 'x86_THREAD_STATE32',
2: 'x86_FLOAT_STATE32',
3: 'x86_EXCEPTION_STATE32',
4: 'x86_THREAD_STATE64',
5: 'x86_FLOAT_STATE64',
6: 'x86_EXCEPTION_STATE64',
7: 'x86_THREAD_STATE',
8: 'x86_FLOAT_STATE',
9: 'x86_EXCEPTION_STATE',
10: 'x86_DEBUG_STATE32',
11: 'x86_DEBUG_STATE64',
12: 'x86_DEBUG_STATE',
13: 'THREAD_STATE_NONE',
14: 'x86_SAVED_STATE_1 (INTERNAL ONLY)',
15: 'x86_SAVED_STATE_2 (INTERNAL ONLY)',
16: 'x86_AVX_STATE32',
17: 'x86_AVX_STATE64',
18: 'x86_AVX_STATE'
}
flags = {
1: 'NOUNDEFS',
2: 'INCRLINK',
4: 'DYLDLINK',
8: 'BINDATLOAD',
16: 'PREBOUND',
32: 'SPLIT_SEGS',
64: 'LAZY_INIT',
128: 'TWOLEVEL',
256: 'FORCE_FLAT',
512: 'NOMULTIDEFS',
1024: 'NOFIXPREBINDING',
2048: 'PREBINDABLE',
4096: 'ALLMODSBOUND',
8192: 'SUBSECTIONS_VIA_SYMBOLS',
16384: 'CANONICAL',
32768: 'WEAK_DEFINES',
65536: 'BINDS_TO_WEAK',
131072: 'ALLOW_STACK_EXECUTION',
262144: 'ROOT_SAFE',
524288: 'SETUID_SAFE',
1048576: 'NOREEXPORTED_DYLIBS',
2097152: 'PIE',
4194304: 'DEAD_STRIPPABLE_DYLIB',
8388608: 'HAS_TLV_DESCRIPTORS',
16777216: 'NO_HEAP_EXECUTION',
33554432: 'APP_EXTENSION_SAFE'
}
stabs = {
0x20: 'GSYM',
0x22: 'FNAME',
0x24: 'FUN',
0x26: 'STSYM',
0x28: 'LCSYM',
0x2a: 'MAIN',
0x2e: 'BNSYM',
0x30: 'PC',
0x32: 'AST',
0x3a: 'MAC_UNDEF',
0x3c: 'OPT',
0x40: 'RSYM',
0x44: 'SLINE',
0x46: 'DSLINE',
0x48: 'BSLINE',
0x4e: 'ENSYM',
0x60: 'SSYM',
0x64: 'SO',
0x66: 'OSO',
0x80: 'LSYM',
0x82: 'BINCL',
0x84: 'SOL',
0x86: 'PARAMS',
0x88: 'VERSION',
0x8a: 'OLEVEL',
0xa0: 'PSYM',
0xa2: 'EINCL',
0xa4: 'ENTRY',
0xc0: 'LBRAC',
0xc2: 'EXCL',
0xe0: 'RBRAC',
0xe2: 'BCOMM',
0xe4: 'ECOMM',
0xe8: 'ECOML',
0xfe: 'LENG'
}
loadcommands = {
1: 'SEGMENT',
2: 'SYMTAB',
3: 'SYMSEG',
4: 'THREAD',
5: 'UNIXTHREAD',
6: 'LOADFVMLIB',
7: 'IDFVMLIB',
8: 'IDENT',
9: 'FVMFILE',
10: 'PREPAGE',
11: 'DYSYMTAB',
12: 'LOAD_DYLIB',
13: 'ID_DYLIB',
14: 'LOAD_DYLINKER',
15: 'ID_DYLINKER',
16: 'PREBOUND_DYLIB',
17: 'ROUTINES',
18: 'SUB_FRAMEWORK',
19: 'SUB_UMBRELLA',
20: 'SUB_CLIENT',
21: 'SUB_LIBRARY',
22: 'TWOLEVEL_HINTS',
23: 'PREBIND_CKSUM',
25: 'SEGMENT_64',
26: 'ROUTINES_64',
27: 'UUID',
29: 'CODE_SIGNATURE',
30: 'SEGMENT_SPLIT_INFO',
32: 'LAZY_LOAD_DYLIB',
33: 'ENCRYPTION_INFO',
34: 'DYLD_INFO',
36: 'VERSION_MIN_MACOSX',
37: 'VERSION_MIN_IPHONEOS',
38: 'FUNCTION_STARTS',
39: 'DYLD_ENVIRONMENT',
41: 'DATA_IN_CODE',
42: 'SOURCE_VERSION',
43: 'DYLIB_CODE_SIGN_DRS',
44: 'ENCRYPTION_INFO_64',
45: 'LINKER_OPTION',
46: 'LINKER_OPTIMIZATION_HINT',
47: 'VERSION_MIN_TVOS',
48: 'VERSION_MIN_WATCHOS',
49: 'NOTE',
50: 'BUILD_VERSION',
2147483672: 'LOAD_WEAK_DYLIB',
2147483676: 'RPATH',
2147483679: 'REEXPORT_DYLIB',
2147483682: 'DYLD_INFO_ONLY',
2147483683: 'LOAD_UPWARD_DYLIB',
2147483688: 'MAIN',
}
# CPU Types & Subtypes as defined in
# http://opensource.apple.com/source/cctools/cctools-822/include/mach/machine.h
cputypes = {
-1: {
-2: 'ANY',
-1: 'MULTIPLE',
0: 'LITTLE_ENDIAN',
1: 'BIG_ENDIAN'
},
1: {
-2: 'VAX',
-1: 'MULTIPLE',
0: 'VAX_ALL',
1: 'VAX780',
2: 'VAX785',
3: 'VAX750',
4: 'VAX730',
5: 'UVAXI',
6: 'UVAXII',
7: 'VAX8200',
8: 'VAX8500',
9: 'VAX8600',
10: 'VAX8650',
11: 'VAX8800',
12: 'UVAXIII'
},
6: {
-2: 'MC680x0',
-1: 'MULTIPLE',
1: 'MC680x0_ALL or MC68030',
2: 'MC68040',
3: 'MC68030_ONLY'
},
7: {-2: 'X86 (I386)',
-1: 'MULITPLE',
0: 'INTEL_MODEL_ALL',
3: 'X86_ALL, X86_64_ALL, I386_ALL, or 386',
4: 'X86_ARCH1 or 486',
5: '586 or PENT',
8: 'X86_64_H or PENTIUM_3',
9: 'PENTIUM_M',
10: 'PENTIUM_4',
11: 'ITANIUM',
12: 'XEON',
15: 'INTEL_FAMILY_MAX',
22: 'PENTPRO',
24: 'PENTIUM_3_M',
26: 'PENTIUM_4_M',
27: 'ITANIUM_2',
28: 'XEON_MP',
40: 'PENTIUM_3_XEON',
54: 'PENTII_M3',
86: 'PENTII_M5',
103: 'CELERON',
119: 'CELERON_MOBILE',
132: '486SX'
},
10: {
-2: 'MC98000',
-1: 'MULTIPLE',
0: 'MC98000_ALL',
1: 'MC98601'
},
11: {
-2: 'HPPA',
-1: 'MULITPLE',
0: 'HPPA_ALL or HPPA_7100',
1: 'HPPA_7100LC'
},
12: {
-2: 'ARM',
-1: 'MULTIPLE',
0: 'ARM_ALL',
1: 'ARM_A500_ARCH',
2: 'ARM_A500',
3: 'ARM_A440',
4: 'ARM_M4',
5: 'ARM_V4T',
6: 'ARM_V6',
7: 'ARM_V5TEJ',
8: 'ARM_XSCALE',
9: 'ARM_V7',
10: 'ARM_V7F',
11: 'ARM_V7S',
12: 'ARM_V7K',
13: 'ARM_V8',
14: 'ARM_V6M',
15: 'ARM_V7M',
16: 'ARM_V7EM'
},
13: {
-2: 'MC88000',
-1: 'MULTIPLE',
0: 'MC88000_ALL',
1: 'MMAX_JPC or MC88100',
2: 'MC88110'
},
14: {
-2: 'SPARC',
-1: 'MULTIPLE',
0: 'SPARC_ALL or SUN4_ALL',
1: 'SUN4_260',
2: 'SUN4_110'
},
15: {
-2: 'I860 (big-endian)',
-1: 'MULTIPLE',
0: 'I860_ALL',
1: 'I860_860'
},
18: {
-2: 'POWERPC',
-1: 'MULTIPLE',
0: 'POWERPC_ALL',
1: 'POWERPC_601',
2: 'POWERPC_602',
3: 'POWERPC_603',
4: 'POWERPC_603e',
5: 'POWERPC_603ev',
6: 'POWERPC_604',
7: 'POWERPC_604e',
8: 'POWERPC_620',
9: 'POWERPC_750',
10: 'POWERPC_7400',
11: 'POWERPC_7450',
100: 'POWERPC_970'
},
16777223: {
-2: 'X86_64',
-1: 'MULTIPLE',
0: 'INTEL_MODEL_ALL',
3: 'X86_ALL, X86_64_ALL, I386_ALL, or 386',
4: 'X86_ARCH1 or 486',
5: '586 or PENT',
8: 'X86_64_H or PENTIUM_3',
9: 'PENTIUM_M',
10: 'PENTIUM_4',
11: 'ITANIUM',
12: 'XEON',
15: 'INTEL_FAMILY_MAX',
22: 'PENTPRO',
24: 'PENTIUM_3_M',
26: 'PENTIUM_4_M',
27: 'ITANIUM_2',
28: 'XEON_MP',
40: 'PENTIUM_3_XEON',
54: 'PENTII_M3',
86: 'PENTII_M5',
103: 'CELERON',
119: 'CELERON_MOBILE',
132: '486SX',
2147483648 + 0: 'INTEL_MODEL_ALL',
2147483648 + 3: 'X86_ALL, X86_64_ALL, I386_ALL, or 386',
2147483648 + 4: 'X86_ARCH1 or 486',
2147483648 + 5: '586 or PENT',
2147483648 + 8: 'X86_64_H or PENTIUM_3',
2147483648 + 9: 'PENTIUM_M',
2147483648 + 10: 'PENTIUM_4',
2147483648 + 11: 'ITANIUM',
2147483648 + 12: 'XEON',
2147483648 + 15: 'INTEL_FAMILY_MAX',
2147483648 + 22: 'PENTPRO',
2147483648 + 24: 'PENTIUM_3_M',
2147483648 + 26: 'PENTIUM_4_M',
2147483648 + 27: 'ITANIUM_2',
2147483648 + 28: 'XEON_MP',
2147483648 + 40: 'PENTIUM_3_XEON',
2147483648 + 54: 'PENTII_M3',
2147483648 + 86: 'PENTII_M5',
2147483648 + 103: 'CELERON',
2147483648 + 119: 'CELERON_MOBILE',
2147483648 + 132: '486SX'
},
16777228: {
-2: 'ARM64',
-1: 'MULTIPLE',
0: 'ARM64_ALL',
1: 'ARM64_V8',
2147483648 + 0: 'ARM64_ALL',
2147483648 + 1: 'ARM64_V8'
},
16777234: {
-2: 'POWERPC64',
-1: 'MULTIPLE',
0: 'POWERPC_ALL',
1: 'POWERPC_601',
2: 'POWERPC_602',
3: 'POWERPC_603',
4: 'POWERPC_603e',
5: 'POWERPC_603ev',
6: 'POWERPC_604',
7: 'POWERPC_604e',
8: 'POWERPC_620',
9: 'POWERPC_750',
10: 'POWERPC_7400',
11: 'POWERPC_7450',
100: 'POWERPC_970',
2147483648 + 0: 'POWERPC_ALL (LIB64)',
2147483648 + 1: 'POWERPC_601 (LIB64)',
2147483648 + 2: 'POWERPC_602 (LIB64)',
2147483648 + 3: 'POWERPC_603 (LIB64)',
2147483648 + 4: 'POWERPC_603e (LIB64)',
2147483648 + 5: 'POWERPC_603ev (LIB64)',
2147483648 + 6: 'POWERPC_604 (LIB64)',
2147483648 + 7: 'POWERPC_604e (LIB64)',
2147483648 + 8: 'POWERPC_620 (LIB64)',
2147483648 + 9: 'POWERPC_750 (LIB64)',
2147483648 + 10: 'POWERPC_7400 (LIB64)',
2147483648 + 11: 'POWERPC_7450 (LIB64)',
2147483648 + 100: 'POWERPC_970 (LIB64)'
}
}
| apache-2.0 | 3,063,381,205,710,470,000 | 22.912963 | 79 | 0.521567 | false | 2.603427 | false | false | false |
Multiscale-Genomics/mg-process-fastq | tadbit_model_wrapper.py | 1 | 10041 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import os.path
import argparse
import sys
import tarfile
import multiprocessing
import json
import shutil
try:
from urllib2 import urlopen
except ImportError:
from urllib.request import urlopen
from random import random
from string import ascii_letters as letters
from basic_modules.workflow import Workflow
from basic_modules.metadata import Metadata
from utils import logger
from utils import remap
from tool.common import CommandLineParser
from tool.common import format_utils
from tool.tb_model import tbModelTool
if '/opt/COMPSs/Bindings/python' in sys.path:
sys.path.pop(sys.path.index('/opt/COMPSs/Bindings/python'))
# ------------------------------------------------------------------------------
class tadbit_model(Workflow): # pylint: disable=invalid-name,too-few-public-methods
"""
Wrapper for the VRE form TADbit model.
It has two main sections:
- looks for optimal parameters for modeling a region
    - models a region for a given set of optimal parameters
"""
configuration = {}
def __init__(self, configuration=None):
"""
Initialise the tool with its configuration.
Parameters
----------
configuration : dict
a dictionary containing parameters that define how the operation
should be carried out, which are specific to each Tool.
"""
tool_extra_config = json.load(open(os.path.dirname(
os.path.abspath(__file__))+'/tadbit_wrappers_config.json'))
os.environ["PATH"] += os.pathsep + format_utils.convert_from_unicode(
tool_extra_config["bin_path"])
if configuration is None:
configuration = {}
self.configuration.update(format_utils.convert_from_unicode(configuration))
# Number of cores available
num_cores = multiprocessing.cpu_count()
self.configuration["ncpus"] = num_cores
        tmp_name = ''.join([letters[int(random() * 52)] for _ in range(5)])
if 'execution' in self.configuration:
self.configuration['project'] = self.configuration['execution']
self.configuration['workdir'] = self.configuration['project']+'/_tmp_tadbit_'+tmp_name
if not os.path.exists(self.configuration['workdir']):
os.makedirs(self.configuration['workdir'])
self.configuration["optimize_only"] = "generation:num_mod_comp" not in self.configuration
if "optimization:max_dist" in self.configuration and \
not self.configuration["optimize_only"]:
del self.configuration["optimization:max_dist"]
del self.configuration["optimization:upper_bound"]
del self.configuration["optimization:lower_bound"]
del self.configuration["optimization:cutoff"]
self.configuration.update(
{(key.split(':'))[-1]: val for key, val in self.configuration.items()}
)
if self.configuration["gen_pos_chrom_name"] == 'all':
self.configuration["gen_pos_chrom_name"] = ""
self.configuration["gen_pos_begin"] = ""
self.configuration["gen_pos_end"] = ""
if "gen_pos_begin" not in self.configuration:
self.configuration["gen_pos_begin"] = ""
if "gen_pos_end" not in self.configuration:
self.configuration["gen_pos_end"] = ""
def run(self, input_files, metadata, output_files):
"""
Parameters
----------
        input_files : dict
            Dictionary of input file locations
        metadata : dict
            Required metadata associated with the input files
        output_files : list
            List of output file locations
        Returns
        -------
        output_files, output_metadata : dict
            Locations and metadata of the generated output files
"""
logger.info(
"PROCESS MODEL - FILES PASSED TO TOOLS: {0}".format(
str(input_files["hic_contacts_matrix_norm"]))
)
m_results_meta = {}
m_results_files = {}
if "norm" in metadata['hic_contacts_matrix_norm'].meta_data:
if metadata['hic_contacts_matrix_norm'].meta_data["norm"] != 'norm':
clean_temps(self.configuration['workdir'])
logger.fatal("Only normalized matrices can be used to build 3D models.\nExiting")
raise ValueError('Missing normalized input matrix.')
input_metadata = remap(self.configuration,
"optimize_only", "gen_pos_chrom_name", "resolution", "gen_pos_begin",
"gen_pos_end", "max_dist", "upper_bound", "lower_bound", "cutoff",
"workdir", "project", "ncpus")
in_files = [format_utils.convert_from_unicode(input_files['hic_contacts_matrix_norm'])]
input_metadata["species"] = "Unknown"
input_metadata["assembly"] = "Unknown"
if "assembly" in metadata['hic_contacts_matrix_norm'].meta_data:
input_metadata["assembly"] = metadata['hic_contacts_matrix_norm'].meta_data["assembly"]
if metadata['hic_contacts_matrix_norm'].taxon_id:
dt_json = json.load(urlopen(
"http://www.ebi.ac.uk/ena/data/taxonomy/v1/taxon/tax-id/" +
str(metadata['hic_contacts_matrix_norm'].taxon_id)))
input_metadata["species"] = dt_json['scientificName']
input_metadata["num_mod_comp"] = self.configuration["num_mod_comp"]
input_metadata["num_mod_keep"] = self.configuration["num_mod_keep"]
tm_handler = tbModelTool()
tm_files, _ = tm_handler.run(in_files, input_metadata, [])
m_results_files["modeling_stats"] = self.configuration['project']+"/model_stats.tar.gz"
tar = tarfile.open(m_results_files["modeling_stats"], "w:gz")
tar.add(tm_files[0], arcname='modeling_files_and_stats')
tar.close()
if not self.configuration["optimize_only"]:
m_results_files["tadkit_models"] = self.configuration['project'] + "/" + \
os.path.basename(tm_files[1])
os.rename(tm_files[1], m_results_files["tadkit_models"])
m_results_meta["tadkit_models"] = Metadata(
data_type="chromatin_3dmodel_ensemble",
file_type="JSON",
file_path=m_results_files["tadkit_models"],
sources=in_files,
meta_data={
"description": "Ensemble of chromatin 3D structures",
"visible": True,
"assembly": input_metadata["assembly"]
},
taxon_id=metadata['hic_contacts_matrix_norm'].taxon_id)
# List of files to get saved
logger.info("TADBIT RESULTS: " + ','.join(
[str(m_results_files[k]) for k in m_results_files]))
m_results_meta["modeling_stats"] = Metadata(
data_type="tool_statistics",
file_type="TAR",
file_path=m_results_files["modeling_stats"],
sources=in_files,
meta_data={
"description": "TADbit modeling statistics and result files",
"visible": True
})
clean_temps(self.configuration['workdir'])
return m_results_files, m_results_meta
# ------------------------------------------------------------------------------
def main(args):
"""
Main function
"""
from apps.jsonapp import JSONApp
app = JSONApp()
result = app.launch(tadbit_model,
args.config,
args.in_metadata,
args.out_metadata)
return result
def clean_temps(working_path):
"""Cleans the workspace from temporal folder and scratch files"""
for the_file in os.listdir(working_path):
file_path = os.path.join(working_path, the_file)
try:
if os.path.isfile(file_path):
os.unlink(file_path)
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
except OSError:
pass
try:
os.rmdir(working_path)
except OSError:
pass
logger.info('[CLEANING] Finished')
def make_absolute_path(files, root):
"""Make paths absolute."""
for role, path in files.items():
files[role] = os.path.join(root, path)
return files
# ------------------------------------------------------------------------------
if __name__ == "__main__":
sys._run_from_cmdl = True # pylint: disable=protected-access
# Set up the command line parameters
PARSER = argparse.ArgumentParser(description="TADbit map")
# Config file
PARSER.add_argument("--config", help="Configuration JSON file",
type=CommandLineParser.valid_file, metavar="config", required=True)
# Metadata
PARSER.add_argument("--in_metadata", help="Project metadata",
metavar="in_metadata", required=True)
# Output metadata
PARSER.add_argument("--out_metadata", help="Output metadata",
metavar="output_metadata", required=True)
# Log file
PARSER.add_argument("--log_file", help="Log file",
metavar="log_file", required=True)
IN_ARGS = PARSER.parse_args()
RESULTS = main(IN_ARGS)
| apache-2.0 | 7,750,510,198,224,055,000 | 35.915441 | 100 | 0.591077 | false | 4.224232 | true | false | false |
sjdv1982/seamless | seamless/communion_encode.py | 1 | 3566 | """
Encoding/decoding of communion messages
message must be a dict containing:
"mode": "request" or "response"
"id": 32-bit identifier, should increase
"content": None, bool, bytes, str, int, float, or tuple of str/int/float/bool
remaining keys: anything JSON-serializable
encoded message is binary, and consists of:
header SEAMLESS
tip: 0 for request, 1 for response
identifier: 32-bit
    nrem: 32-bit, the length of the remaining keys buffer
    rem: remaining keys buffer (JSON format), written before the content
    content: is_str byte + remainder. For is_str:
        0: No remainder, message is None
        1: bool. remainder is 0 or 1
        2: bytes. remainder is raw content
        3: str. remainder is UTF-8 encoded content
        4: int/float/tuple. remainder is JSON-encoded content.
"""
import numpy as np
import json
def communion_encode(msg):
assert msg["mode"] in ("request", "response")
m = 'SEAMLESS'.encode()
tip = b'\x00' if msg["mode"] == "request" else b'\x01'
m += tip
m += np.uint32(msg["id"]).tobytes()
remainder = msg.copy()
remainder.pop("mode")
remainder.pop("id")
remainder.pop("content")
if len(remainder.keys()):
rem = json.dumps(remainder).encode()
nrem = np.uint32(len(rem)).tobytes()
m += nrem
m += rem
else:
m += b'\x00\x00\x00\x00'
content = msg["content"]
if content is None:
m += b'\x00'
else:
assert isinstance(content, (str, int, float, bytes, bool, tuple)), content
if isinstance(content, bool):
is_str = b'\x01'
elif isinstance(content, (int, float, tuple)):
is_str = b'\x04'
else:
is_str = b'\x03' if isinstance(content, str) else b'\x02'
m += is_str
if isinstance(content, str):
content = content.encode()
elif isinstance(content, bool):
content = b'\x01' if content else b'\x00'
elif isinstance(content, (int, float, tuple)):
if isinstance(content, tuple):
for item in content:
assert item is None or isinstance(item, (str, int, float, bool)), type(item)
content = json.dumps(content).encode()
m += content
assert communion_decode(m) == msg, (communion_decode(m), msg)
return m
def communion_decode(m):
assert isinstance(m, bytes)
message = {}
head = 'SEAMLESS'.encode()
assert m[:len(head)] == head
m = m[len(head):]
tip = m[:1]
m = m[1:]
assert tip == b'\x01' or tip == b'\x00', tip
message["mode"] = "request" if tip == b'\x00' else "response"
l1, l2 = m[:4], m[4:8]
m = m[8:]
message["id"] = np.frombuffer(l1,np.uint32)[0]
nrem = np.frombuffer(l2,np.uint32)[0]
if nrem:
rem = m[:nrem]
rem = rem.decode()
rem = json.loads(rem)
message.update(rem)
m = m[nrem:]
is_str = m[:1]
if is_str == b'\x00':
content = None
elif is_str == b'\x01':
content = True if m[1:] == b'\x01' else False
elif is_str == b'\x04':
content = json.loads(m[1:])
assert isinstance(content, (int, float, list))
if isinstance(content, list):
for item in content:
assert item is None or isinstance(item, (str, int, float, bool)), type(item)
content = tuple(content)
else:
assert is_str == b'\x03' or is_str == b'\x02'
content = m[1:]
if is_str == b'\x03':
content = content.decode()
message["content"] = content
return message
| mit | -6,822,255,612,640,416,000 | 32.327103 | 96 | 0.576837 | false | 3.442085 | false | false | false |
wmaciel/van-crime | src/run_demo.py | 1 | 4046 | # coding=utf-8
__author__ = 'walthermaciel'
from geopy.geocoders import DataBC
from geopy.exc import GeopyError
from time import sleep
import sys
from ssl import SSLError
from create_feature_vector import create_vector
import os
import pandas as pd
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import RandomForestRegressor
crime_id = {0: 'BNE Residential ',
1: 'Theft from Vehicle',
2: 'Other Thefts ',
3: 'Mischief ',
4: 'Theft of Vehicle ',
5: 'BNE Commercial '}
def gather_time():
print 'Year:\t',
year = sys.stdin.readline().strip()
month_ok = False
while not month_ok:
print 'Month:\t',
month = sys.stdin.readline().strip()
if 12 >= int(month) > 0:
month_ok = True
else:
print 'Nice try, champ...'
return int(year), int(month)
def gather_address():
print 'Street Number:\t',
st_num = sys.stdin.readline().strip()
print 'Street Name:\t',
st_name = sys.stdin.readline().strip()
address = st_num + ' ' + st_name + ', Vancouver, BC, Canada'
return address
def gather_lat_long(address):
print 'Researching lat long for ' + address + '...'
got_it = False
delay = 1
while not got_it:
if delay > 10:
print 'could not find address, exiting...'
exit()
try:
sleep(delay)
location = geolocator.geocode(address)
got_it = True
except (GeopyError, SSLError) as e:
delay *= 2
got_it = False
print '!!! Are you sure you got the right address? Trying again...'
print 'Got it!'
latitude = "{:.8f}".format(location.latitude)
longitude = "{:.8f}".format(location.longitude)
print 'LatLong:\t( ' + latitude + ', ' + longitude + ' )'
return location.latitude, location.longitude
def run_demo():
os.system('clear')
print '''
888 888 .d8888b. d8b
888 888 d88P Y88b Y8P
888 888 888 888
Y88b d88P 8888b. 88888b. 888 888d888 888 88888b.d88b. .d88b.
Y88b d88P "88b 888 "88b 888 888P" 888 888 "888 "88b d8P Y8b
Y88o88P .d888888 888 888 888 888 888 888 888 888 888 88888888
Y888P 888 888 888 888 Y88b d88P 888 888 888 888 888 Y8b.
Y8P "Y888888 888 888 "Y8888P" 888 888 888 888 888 "Y8888
------------------ https://github.com/wmaciel/van-crime -----------------
'''
year, month = gather_time()
address = gather_address()
latitude, longitude = gather_lat_long(address)
print 'Generating feature vector...',
f_vec = create_vector(int(year), int(month), latitude, longitude)
if isinstance(f_vec, int):
print 'Failed'
else:
print 'OK'
print 'Loading classification model...',
clf = joblib.load('../models/random_forest_model.p')
print 'OK'
print 'Loading regression model...',
reg = joblib.load('../models/RandomForestRegressor.p')
print 'OK'
print '\n\n----- Results -----'
print 'Probability of crime type, given that a crime happened:'
prob_list = clf.predict_proba(f_vec.as_matrix())[0]
for i, p in enumerate(prob_list):
print crime_id[i] + '\t' + "{:.2f}".format(p * 100) + '%'
print '--------------------------\n'
print 'Expected number of crimes to happen:'
expected = reg.predict(f_vec.as_matrix())[0]
print expected
print '--------------------------\n'
print 'Expected number of crimes to happen by type:'
for i, p in enumerate(prob_list):
print crime_id[i] + '\t' + "{:.2f}".format(p * expected)
if __name__ == '__main__':
geolocator = DataBC()
while True:
run_demo()
print '\npress enter to reset'
sys.stdin.readline()
| mit | 1,701,064,191,257,659,000 | 27.9 | 79 | 0.555116 | false | 3.440476 | false | false | false |
ESEGroup/Paraguai | domain/usuario/servico_crud_usuario.py | 1 | 3946 | #-*- coding: utf-8 -*-
from .usuario import Usuario
from .nivel_acesso import *
from .senha_criptografada import *
from domain.excecoes import *
from domain.email import EmailUsuarioCadastrado, EmailUsuarioAlterado, EmailUsuarioRemovido
class ServicoCRUDUsuario():
"""Essa classe modela um serviço CRUD para Usuários, que independe da
implementação do armazenamento.
:param repositorio: Objeto de RepositorioUsuario"""
def __init__(self, repositorio, servico_email):
self.repositorio = repositorio
self.servico_email = servico_email
def criar(self, dados):
"""Cria um Usuário. Implementa o UC12 (Adicionar Usuário).
:param dados: Objeto de DTOUsuario com os dados a serem inseridos."""
escolha = {
0: UsuarioComum(),
1: SistemaManutencao(),
2: Administrador(),
}
try:
nivelAcesso = escolha[dados.nivelAcesso]
except KeyError:
raise ExcecaoNivelAcessoInvalido
senhaCriptografada = SenhaCriptografada(dados.senha)
usuario = Usuario(dados.nome, dados.email, senhaCriptografada, nivelAcesso)
if self.repositorio.obter_por_email(dados.email):
raise ExcecaoUsuarioJaExistente
usuario = self.repositorio.inserir(usuario)
email = EmailUsuarioCadastrado(usuario, dados.senha)
self.servico_email.enviar(usuario.email, email)
return usuario
def alterar(self, _id, dados):
"""Atualiza os dados de um Usuário. Implementa o UC13 (Alterar Usuário).
:param _id: Número inteiro que representa o ID do Usuário desejado.
:param dados: Objeto de DTOUsuario com os dados a serem inseridos."""
usuario = self.repositorio.obter(_id)
if not usuario:
raise ExcecaoUsuarioInexistente
        # Usuario that already owns the e-mail address being changed to
usuarioDoEmail = self.repositorio.obter_por_email(dados.email)
if usuarioDoEmail and usuarioDoEmail.id != _id:
raise ExcecaoUsuarioJaExistente
escolha = {
0: UsuarioComum(),
1: SistemaManutencao(),
2: Administrador(),
}
try:
usuario.nivelAcesso = escolha[dados.nivelAcesso]
except KeyError:
raise ExcecaoNivelAcessoInvalido
usuario.nome = dados.nome
usuario.email = dados.email
if dados.senha:
usuario.senhaCriptografada = SenhaCriptografada(dados.senha)
self.repositorio.atualizar(usuario)
email = EmailUsuarioAlterado(usuario)
self.servico_email.enviar(usuario.email, email)
return usuario
def listar(self):
"""Lista todos os Usuários, retornando uma lista de objetos de Usuario.
Implementa parte do UC04 (Buscar Usuário)."""
return self.repositorio.listar()
def obter(self, _id):
"""Busca pelo Usuário de um ID fornecido e o retorna. Implementa
parte do UC04 (Buscar Usuário).
:param _id: Número inteiro que representa o ID do Usuário desejado."""
usuario = self.repositorio.obter(_id)
if not usuario:
raise ExcecaoUsuarioInexistente
return usuario
def remover(self, _id):
"""Remove o Usuário que possui o ID fornecido e o retorna, além de
cancelar todos os seus Agendamentos. Implementa o UCXXX (Remover Usuário).
:param _id: Número inteiro que representa o ID do Usuário desejado."""
        # TODO: look up Agendamentos associated with the Usuario with id _id
usuario = self.repositorio.obter(_id)
if not usuario:
raise ExcecaoUsuarioInexistente
email = EmailUsuarioRemovido(usuario)
self.servico_email.enviar(usuario.email, email)
        # TODO: cancel all Agendamentos in the list
return (self.repositorio.remover(_id), True)
| apache-2.0 | 3,734,639,932,427,047,000 | 30.894309 | 91 | 0.653581 | false | 2.910237 | false | false | false |
lotharwissler/bioinformatics | python/gff/droso-chromosome-reconstruction.py | 1 | 2939 | #!/usr/bin/python
import os, sys # low level handling, such as command line stuff
import string # string methods available
import getopt # command line argument handling
import re # regular expressions, needed to parse GFF attribute strings
from collections import defaultdict
from low import * # custom functions, written by myself
# =============================================================================
def show_help( ):
""" displays the program parameter list and usage information """
print >> sys.stderr, "usage: " + sys.argv[0] + " -d <gff-folder>"
stdout( " option description" )
stdout( " -h help (this text here)" )
stdout( " -d folder with gff files to parse" )
stdout( " " )
sys.exit(1)
# =============================================================================
def handle_arguments():
""" verifies the presence of all necessary arguments and returns the data dir """
if len ( sys.argv ) == 1:
stderr( "no arguments provided." )
show_help()
try: # check for the right arguments
keys, values = getopt.getopt( sys.argv[1:], "hd:" )
except getopt.GetoptError:
stderr( "invalid arguments provided." )
show_help()
args = {}
for key, value in keys:
if key == '-d': args['dir'] = value
if not args.has_key('dir'):
print >> sys.stderr, "gff dir argument missing."
show_help()
elif not dir_exists( args.get('dir') ):
print >> sys.stderr, "gff dir does not exist."
show_help()
if not args['dir'].endswith("/"): args['dir'] += '/'
return args
# =============================================================================
# === MAIN ====================================================================
# =============================================================================
def main( args ):
def process_gff_line(line, species):
if line.startswith("#") or len(line.rstrip()) == 0: return
columns = line.rstrip().split("\t")
if len(columns) != 9: return
type = columns[2]
if type != "gene": return
chr, start, stop, strand, descr = columns[0], columns[3], columns[4], columns[6], columns[8]
id = re.search("ID=([^;]+);", descr).group(1)
sys.stdout.write(species + "\t" + id + "\t")
print string.join([chr, start, stop, strand], "\t")
# =============================================================================
for filename in os.listdir(args['dir']):
gzip = 0
if not filename.endswith(".gff") and not filename.endswith(".gff.gz"): continue
species = filename[:filename.index("-")]
filename = args['dir'] + filename
if filename.endswith(".gff.gz"): gzip = 1
if gzip:
os.system("gunzip " + filename)
filename = filename[:-3]
fo = open(filename)
for line in fo: process_gff_line(line, species)
fo.close()
if gzip: os.system("gzip " + filename)
# =============================================================================
args = handle_arguments()
main( args )
| mit | 1,956,595,199,548,383,200 | 33.988095 | 96 | 0.50051 | false | 4.180654 | false | false | false |
kawamon/hue | apps/hbase/src/hbase/hbase_site.py | 2 | 3316 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import logging
import os.path
import sys
from hadoop import confparse
from desktop.lib.security_util import get_components
if sys.version_info[0] > 2:
open_file = open
else:
open_file = file
LOG = logging.getLogger(__name__)
SITE_PATH = None
SITE_DICT = None
_CNF_HBASE_THRIFT_KERBEROS_PRINCIPAL = 'hbase.thrift.kerberos.principal'
_CNF_HBASE_THRIFT_SPNEGO_PRINCIPAL = 'hbase.thrift.spnego.principal'
_CNF_HBASE_AUTHENTICATION = 'hbase.security.authentication'
_CNF_HBASE_REGIONSERVER_THRIFT_FRAMED = 'hbase.regionserver.thrift.framed'
_CNF_HBASE_IMPERSONATION_ENABLED = 'hbase.thrift.support.proxyuser'
_CNF_HBASE_USE_THRIFT_HTTP = 'hbase.regionserver.thrift.http'
_CNF_HBASE_USE_THRIFT_SSL = 'hbase.thrift.ssl.enabled'
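# The keys above are looked up in hbase-site.xml, which uses the standard
# Hadoop-style configuration layout; an illustrative (hypothetical) entry:
#
#   <configuration>
#     <property>
#       <name>hbase.regionserver.thrift.framed</name>
#       <value>true</value>
#     </property>
#   </configuration>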
def reset():
global SITE_DICT
SITE_DICT = None
def get_conf():
if SITE_DICT is None:
_parse_site()
return SITE_DICT
def get_server_principal():
thrift_principal = get_conf().get(_CNF_HBASE_THRIFT_KERBEROS_PRINCIPAL, None)
principal = get_conf().get(_CNF_HBASE_THRIFT_SPNEGO_PRINCIPAL, thrift_principal)
components = get_components(principal)
if components is not None:
return components[0]
def get_server_authentication():
return get_conf().get(_CNF_HBASE_AUTHENTICATION, 'NOSASL').upper()
def get_thrift_transport():
use_framed = get_conf().get(_CNF_HBASE_REGIONSERVER_THRIFT_FRAMED)
if use_framed is not None:
if use_framed.upper() == "TRUE":
return "framed"
else:
return "buffered"
else:
#Avoid circular import
from hbase.conf import THRIFT_TRANSPORT
return THRIFT_TRANSPORT.get()
def is_impersonation_enabled():
#Avoid circular import
from hbase.conf import USE_DOAS
return get_conf().get(_CNF_HBASE_IMPERSONATION_ENABLED, 'FALSE').upper() == 'TRUE' or USE_DOAS.get()
def is_using_thrift_http():
#Avoid circular import
from hbase.conf import USE_DOAS
return get_conf().get(_CNF_HBASE_USE_THRIFT_HTTP, 'FALSE').upper() == 'TRUE' or USE_DOAS.get()
def is_using_thrift_ssl():
return get_conf().get(_CNF_HBASE_USE_THRIFT_SSL, 'FALSE').upper() == 'TRUE'
def _parse_site():
global SITE_DICT
global SITE_PATH
#Avoid circular import
from hbase.conf import HBASE_CONF_DIR
SITE_PATH = os.path.join(HBASE_CONF_DIR.get(), 'hbase-site.xml')
try:
data = open_file(SITE_PATH, 'r').read()
except IOError as err:
if err.errno != errno.ENOENT:
LOG.error('Cannot read from "%s": %s' % (SITE_PATH, err))
return
data = ""
SITE_DICT = confparse.ConfParse(data)
| apache-2.0 | 4,130,980,423,032,617,500 | 28.607143 | 102 | 0.721653 | false | 3.090401 | false | false | false |
windelbouwman/ppci-mirror | ppci/binutils/disasm.py | 1 | 1210 | """ Contains disassembler stuff. """
from ..arch.data_instructions import DByte
class Disassembler:
""" Base disassembler for some architecture """
def __init__(self, arch):
self.arch = arch
for instruction in arch.isa.instructions:
# print(instruction, instruction.patterns)
# for nl in instruction.non_leaves:
# print(' ', nl.patterns)
pass
def disasm(self, data, outs, address=0):
""" Disassemble data into an instruction stream """
# TODO: implement this!
# The trial and error method, will be slow as a snail:
# for instruction in self.arch.isa.instructions:
# for size in instruction.sizes():
# part = data[:size]
# try:
# print(instruction, part, size)
# i = instruction.decode(part)
# print(i)
# except ValueError:
# pass
# For now, all is bytes!
for byte in data:
ins = DByte(byte)
ins.address = address
outs.emit(ins)
address += len(ins.encode())
def take_one(self):
pass
| bsd-2-clause | -6,070,502,765,213,653,000 | 29.25 | 62 | 0.52562 | false | 4.384058 | false | false | false |
Domatix/stock-logistics-workflow | stock_split_picking/models/stock_picking.py | 2 | 3356 | # Copyright 2013-2015 Camptocamp SA - Nicolas Bessi
# Copyright 2018 Camptocamp SA - Julien Coux
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, models
from odoo.exceptions import UserError
from odoo.tools.float_utils import float_compare
class StockPicking(models.Model):
"""Adds picking split without done state."""
_inherit = "stock.picking"
@api.multi
def split_process(self):
"""Use to trigger the wizard from button with correct context"""
for picking in self:
# Check the picking state and condition before split
if picking.state == 'draft':
raise UserError(_('Mark as todo this picking please.'))
if all([x.qty_done == 0.0 for x in picking.move_line_ids]):
raise UserError(
_('You must enter done quantity in order to split your '
'picking in several ones.'))
# Split moves considering the qty_done on moves
new_moves = self.env['stock.move']
for move in picking.move_lines:
rounding = move.product_uom.rounding
qty_done = move.quantity_done
qty_initial = move.product_uom_qty
qty_diff_compare = float_compare(
qty_done, qty_initial, precision_rounding=rounding
)
if qty_diff_compare < 0:
qty_split = qty_initial - qty_done
qty_uom_split = move.product_uom._compute_quantity(
qty_split,
move.product_id.uom_id,
rounding_method='HALF-UP'
)
new_move_id = move._split(qty_uom_split)
for move_line in move.move_line_ids:
if move_line.product_qty and move_line.qty_done:
# To avoid an error
# when picking is partially available
try:
move_line.write(
{'product_uom_qty': move_line.qty_done})
except UserError:
pass
new_moves |= self.env['stock.move'].browse(new_move_id)
# If we have new moves to move, create the backorder picking
if new_moves:
backorder_picking = picking.copy({
'name': '/',
'move_lines': [],
'move_line_ids': [],
'backorder_id': picking.id,
})
picking.message_post(
_(
'The backorder <a href="#" '
'data-oe-model="stock.picking" '
'data-oe-id="%d">%s</a> has been created.'
) % (
backorder_picking.id,
backorder_picking.name
)
)
new_moves.write({
'picking_id': backorder_picking.id,
})
new_moves.mapped('move_line_ids').write({
'picking_id': backorder_picking.id,
})
new_moves._action_assign()
| agpl-3.0 | 3,325,539,118,000,248,000 | 40.95 | 76 | 0.470203 | false | 4.801144 | false | false | false |
tjctw/PythonNote | thinkstat/install_test.py | 2 | 1432 | """This file contains code used in "Think Stats",
by Allen B. Downey, available from greenteapress.com
Copyright 2010 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
import math
import matplotlib.pyplot as pyplot
import myplot
import Pmf
def NormalPdf(x):
"""Computes the PDF of x in the standard normal distribution."""
return math.exp(-x**2/2) / math.sqrt(2 * math.pi)
def Linspace(start, stop, n):
"""Makes a list of n floats from start to stop.
Similar to numpy.linspace()
"""
return [start + (stop-start) * float(i)/(n-1) for i in range(n)]
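# For example (illustrative, not part of the original script):
#   Linspace(0.0, 1.0, 5) -> [0.0, 0.25, 0.5, 0.75, 1.0]
# which matches numpy.linspace(0.0, 1.0, 5).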
def RenderPdf(mu, sigma, n=101):
"""Makes xs and ys for a normal PDF with (mu, sigma).
n: number of places to evaluate the PDF
"""
xs = Linspace(mu-4*sigma, mu+4*sigma, n)
ys = [NormalPdf((x-mu) / sigma) for x in xs]
return xs, ys
def main():
xs, ys = RenderPdf(100, 15)
n = 34
pyplot.fill_between(xs[-n:], ys[-n:], y2=0.0001, color='blue', alpha=0.2)
s = 'Congratulations!\nIf you got this far,\nyou must be here.'
d = dict(shrink=0.05)
pyplot.annotate(s, [127, 0.02], xytext=[80, 0.05], arrowprops=d)
myplot.Plot(xs, ys,
clf=False,
show=True,
title='Distribution of IQ',
xlabel='IQ',
ylabel='PDF',
legend=False
)
if __name__ == "__main__":
main()
| cc0-1.0 | -2,332,128,312,569,563,000 | 23.689655 | 77 | 0.587291 | false | 3.140351 | false | false | false |
LiqunHu/MVPN | testing/testUser.py | 1 | 2284 | # -*- coding: utf-8 -*-
"""
Created on Thu May 12 16:25:02 2016
@author: huliqun
"""
import requests
import json
import uuid
import base64
_SERVER_HOST = '127.0.0.1'
_SERVER_PORT = 8000
_SERVER_BASE_URL = 'http://{0}:{1}/api/users'.format(_SERVER_HOST, _SERVER_PORT)
headers = {'content-type':'application/json'}
body = '{"username":"wahaha@qq.com","displayname":"wahaha","email":"wahaha@qq.com","password":"123456","mobile":"18698729476"}'
#resp = requests.get(_SERVER_BASE_URL)
resp = requests.post(_SERVER_BASE_URL, headers=headers,data=body)
print(resp.text)
print(resp)
headers = {'Authorization': '3161cc5a950fead158ebe803f7e56822',
'Account-ID': '111111111111111',
'content-type':'application/json'}
password = '123456'
#resp = requests.get(_SERVER_BASE_URL, headers=headers,data=body)
#print(resp.text)
#print(resp)
import pyDes
import hashlib
def md5(s):
m = hashlib.md5()
m.update(s.encode("utf-8"))
return m.digest()
# For Python3, you'll need to use bytes, i.e.:
# data = b"Please encrypt my data"
# k = pyDes.des(b"DESCRYPT", pyDes.CBC, b"\0\0\0\0\0\0\0\0", pad=None, padmode=pyDes.PAD_PKCS5)
data = str(uuid.uuid4()).replace('-','')
k = pyDes.triple_des(md5('123456'), pyDes.CBC, "\0\0\0\0\0\0\0\0", pad=None, padmode=pyDes.PAD_PKCS5)
d = base64.b64encode(k.encrypt(data)).decode()
idf = base64.b64encode(k.encrypt('wahaha@qq.com')).decode()
headers = {'content-type':'application/json'}
bodyData = {
'username':'wahaha@qq.com',
'identifyCode':idf
}
print(idf)
body = json.dumps(bodyData)
print("Encrypted: %r" % idf)
print("Decrypted: %r" % k.decrypt(base64.b64decode(idf.encode())).decode() )
_SERVER_BASE_URL = 'http://{0}:{1}/api/auth'.format(_SERVER_HOST, _SERVER_PORT)
resp = requests.get(_SERVER_BASE_URL, headers=headers,data=body)
print(resp.text)
print(resp)
headers = {'Cookie':'awesession=c7f406241bcc49209eb58a527520e051-1465822334-fe92174a8e3956edc8befc20911a0b54c8f7b2db; Domain=aaaa.com;',
'content-type':'application/json'}
_SERVER_BASE_URL = 'http://{0}:{1}/api/users'.format(_SERVER_HOST, _SERVER_PORT)
resp = requests.get(_SERVER_BASE_URL, headers=headers,data=body)
print(resp.text)
print(resp)
| gpl-3.0 | -2,363,591,250,241,619,500 | 31.101449 | 136 | 0.662434 | false | 2.646582 | false | false | false |