import matplotlib.pyplot as plt

from .plot_utils import density_scatter


def plot_params_vs_tbr(df, params, n_rows=3, n_columns=3, density_bins=80):
    '''Plot multiple params vs. TBR. Supplied parameters are expected to be
    tuples of column names and human-readable names (for labels).'''
    fig = plt.figure()

    for param_idx, (name, human_readable_name) in enumerate(params):
        xs = df[name].to_numpy()
        ys = df['tbr'].to_numpy()

        ax = plt.subplot(n_rows, n_columns, 1 + param_idx)

        if density_bins is None:
            ax.scatter(xs, ys, s=5)
        else:
            density_scatter(xs, ys, ax=ax, bins=density_bins, s=5)

        ax.set_xlabel(human_readable_name)
        ax.set_ylabel('TBR')

    return fig, ax
python
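A minimal usage sketch for the plotting helper above, assuming it is importable from its module; the DataFrame columns and parameter names are illustrative and not part of the original snippet.

# Hypothetical usage of plot_params_vs_tbr; column names are made up for illustration.
import pandas as pd

df = pd.DataFrame({
    'breeder_fraction': [0.1, 0.5, 0.9],
    'thickness': [10.0, 20.0, 30.0],
    'tbr': [0.8, 1.1, 1.3],
})
params = [('breeder_fraction', 'Breeder fraction'),
          ('thickness', 'Blanket thickness [cm]')]
# density_bins=None falls back to a plain scatter, so density_scatter is not needed here.
fig, ax = plot_params_vs_tbr(df, params, n_rows=1, n_columns=2, density_bins=None)
fig.savefig('params_vs_tbr.png')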
# benchmark.py
#
# A micro benchmark comparing the performance of sending messages into
# a coroutine vs. sending messages into an object

# An object
class GrepHandler(object):
    def __init__(self, pattern, target):
        self.pattern = pattern
        self.target = target
    def send(self, line):
        if self.pattern in line:
            self.target.send(line)

# A coroutine
from coroutine import coroutine

@coroutine
def grep(pattern, target):
    while True:
        line = (yield)
        if pattern in line:
            target.send(line)

# A null-sink to send data
@coroutine
def null():
    while True:
        item = (yield)

# A benchmark
line = 'python is nice'
p1 = grep('python', null())           # Coroutine
p2 = GrepHandler('python', null())    # Object

from timeit import timeit

print("coroutine:", timeit("p1.send(line)", "from __main__ import line, p1"))
print("object:", timeit("p2.send(line)", "from __main__ import line, p2"))

# Understanding the performance difference
# class GrepHandler(object):
#     ...
#     def send(self, line):
#         if self.pattern in line:      -> Look at these self lookups!
#             self.target.send(line)    -> Look at these self lookups!
#
# @coroutine
# def grep(pattern, target):
#     while True:
#         line = (yield)
#         if pattern in line:           -> "self" free
#             target.send(line)         -> "self" free
python
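The benchmark imports a coroutine decorator from a module that is not shown in this snippet. A common definition, and presumably the one intended here, simply primes the generator by advancing it to its first yield so that .send() can be used immediately; a sketch under that assumption:

# coroutine.py -- sketch of the priming decorator assumed by benchmark.py
def coroutine(func):
    """Decorator: advance a generator to its first yield so it can receive .send()."""
    def start(*args, **kwargs):
        gen = func(*args, **kwargs)
        next(gen)          # prime the coroutine
        return gen
    return start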
# -*- coding: utf-8 -*-

# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.

"""This is a class for Contrast."""
from PIL import ImageEnhance

from .ops import float_parameter
from vega.common import ClassFactory, ClassType


@ClassFactory.register(ClassType.TRANSFORM)
class Contrast(object):
    """Applies Contrast to 'img'.

    The Contrast operation controls the contrast of the image:
    level = 0 gives a gray image, whereas level = 1 gives the original image.

    :param level: Strength of the operation specified as an Integer
        from [0, 'PARAMETER_MAX'].
    :type level: int
    """

    def __init__(self, level):
        """Construct the Contrast class."""
        self.level = level

    def __call__(self, img):
        """Call function of Contrast.

        :param img: input image
        :type img: numpy or tensor
        :return: the image after transform
        :rtype: numpy or tensor
        """
        v = float_parameter(self.level, 1.8) + .1
        return ImageEnhance.Contrast(img).enhance(v)
python
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest import numpy as np import math from op_test import OpTest from test_lstm_op import identity, sigmoid, tanh, relu class TestGRUOp(OpTest): lod = [[2, 4, 3]] batch_size = sum(lod[0]) frame_size = 5 activate = { 'identity': identity, 'sigmoid': sigmoid, 'tanh': tanh, 'relu': relu } @staticmethod def seq_to_batch(lod, is_reverse): idx_in_seq_list = [] seq_lens = lod[0] seq_starts = [0] for i in range(len(seq_lens)): seq_starts.append(seq_starts[-1] + seq_lens[i]) sorted_seqs = sorted( list(range(len(seq_lens))), lambda x, y: seq_lens[y] - seq_lens[x]) num_batch = seq_lens[sorted_seqs[0]] for batch_idx in range(num_batch): idx_in_seq = [] for i in range(len(seq_lens)): if seq_lens[sorted_seqs[i]] <= batch_idx: break idx = (seq_starts[sorted_seqs[i] + 1] - 1 - batch_idx ) if is_reverse else ( seq_starts[sorted_seqs[i]] + batch_idx) idx_in_seq.append(idx) idx_in_seq_list.append(idx_in_seq) return idx_in_seq_list, sorted_seqs def gru_step(self, x, h_p, w, b): batch_size = x.shape[0] frame_size = w.shape[0] g = x + np.tile(b, (batch_size, 1)) w_u_r = w.flatten()[:frame_size * frame_size * 2].reshape( (frame_size, frame_size * 2)) u_r = self.activate[self.attrs['gate_activation']](np.dot( h_p, w_u_r) + g[:, :frame_size * 2]) u = u_r[:, :frame_size] r = u_r[:, frame_size:frame_size * 2] r_h_p = r * h_p w_c = w.flatten()[frame_size * frame_size * 2:].reshape( (frame_size, frame_size)) c = self.activate[self.attrs['activation']](np.dot(r_h_p, w_c) + g[:, frame_size * 2:]) g = np.hstack((u_r, c)) h = u * c + (1 - u) * h_p return g, r_h_p, h def gru(self): input, lod = self.inputs['Input'] w = self.inputs['Weight'] b = self.inputs['Bias'] if 'Bias' in self.inputs else np.zeros( (1, self.frame_size * 3)) batch_gate = self.outputs['BatchGate'] batch_reset_hidden_prev = self.outputs['BatchResetHiddenPrev'] batch_hidden = self.outputs['BatchHidden'] hidden = self.outputs['Hidden'] idx_in_seq_list = self.idx_in_seq_list h_p = self.inputs['H0'][ self.sorted_seqs] if 'H0' in self.inputs else np.zeros( (len(idx_in_seq_list[0]), self.frame_size)) num_batch = len(idx_in_seq_list) end_idx = 0 for batch_idx in range(num_batch): x = input[idx_in_seq_list[batch_idx]] g, r_h_p, h = self.gru_step(x, h_p, w, b) if batch_idx < (num_batch - 1): h_p = h[:len(idx_in_seq_list[batch_idx + 1])] start_idx = end_idx end_idx = start_idx + len(idx_in_seq_list[batch_idx]) batch_gate[start_idx:end_idx] = g batch_reset_hidden_prev[start_idx:end_idx] = r_h_p batch_hidden[start_idx:end_idx] = h hidden[idx_in_seq_list[batch_idx]] = h return batch_gate, batch_reset_hidden_prev, hidden def set_data(self): lod = self.lod self.idx_in_seq_list, self.sorted_seqs = self.seq_to_batch( lod, self.is_reverse) batch_size = self.batch_size frame_size = self.frame_size input = np.random.rand(batch_size, frame_size * 3).astype('float64') h0 = np.random.rand(len(self.idx_in_seq_list[0]), frame_size).astype('float64') weight = 
np.random.rand(frame_size, frame_size * 3).astype('float64') bias = np.random.rand(1, frame_size * 3).astype('float64') self.inputs = { 'Input': (input, lod), 'H0': h0, 'Weight': weight, 'Bias': bias } self.outputs = { 'BatchGate': np.zeros( (batch_size, frame_size * 3), dtype='float64'), 'BatchResetHiddenPrev': np.zeros( (batch_size, frame_size), dtype='float64'), 'BatchHidden': np.zeros( (batch_size, frame_size), dtype='float64'), 'Hidden': np.zeros( (batch_size, frame_size), dtype='float64') } def set_confs(self): self.is_reverse = False self.attrs = { 'activation': 'tanh', 'gate_activation': 'sigmoid', 'is_reverse': self.is_reverse } def setUp(self): self.op_type = "gru" self.set_confs() self.set_data() self.gru() def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['Input', 'H0', 'Weight', 'Bias'], ['Hidden']) class TestGRUOpNoInitial(TestGRUOp): def set_data(self): super(TestGRUOpNoInitial, self).set_data() self.inputs.pop('H0') def test_check_grad(self): self.check_grad(['Input', 'Weight', 'Bias'], ['Hidden']) class TestGRUOpReverse(TestGRUOp): def set_confs(self): self.is_reverse = True self.attrs = { 'activation': 'tanh', 'gate_activation': 'sigmoid', 'is_reverse': self.is_reverse } if __name__ == "__main__": unittest.main()
python
try:
    import oct2py
except OSError as e:
    print(e)
python
# -*- coding: utf-8 -*- ''' Module for managing Infoblox Will look for pillar data infoblox:server, infoblox:user, infoblox:password if not passed to functions .. versionadded:: Boron :depends: - requests ''' from __future__ import absolute_import # Import salt libs from salt.exceptions import CommandExecutionError from salt.exceptions import SaltInvocationError import logging log = logging.getLogger(__name__) try: import json import requests HAS_IMPORTS = True except ImportError: HAS_IMPORTS = False def __virtual__(): if HAS_IMPORTS: return True return False def _conn_info_check(infoblox_server=None, infoblox_user=None, infoblox_password=None): ''' get infoblox stuff from pillar if not passed ''' if infoblox_server is None: infoblox_server = __salt__['pillar.get']('infoblox:server', None) if infoblox_user is None: infoblox_user = __salt__['pillar.get']('infoblox:user', None) log.debug('Infoblox username is "{0}"'.format(infoblox_user)) if infoblox_password is None: infoblox_password = __salt__['pillar.get']('infoblox:password', None) return infoblox_server, infoblox_user, infoblox_password def _process_return_data(retData): ''' generic return processing ''' if retData.status_code == 200: if retData.json(): return retData else: log.debug('no data returned from infoblox') return None else: msg = 'Unsuccessful error code {0} returned'.format(retData.status_code) log.error(msg) return None def delete_record(name, dns_view, record_type, infoblox_server=None, infoblox_user=None, infoblox_password=None, infoblox_api_version='v1.4.2', sslVerify=True): ''' delete a record name name of the record dns_view the DNS view to remove the record from record_type the record type (a, cname, host, etc) infoblox_server the infoblox server hostname (can also use the infoblox:server pillar) infoblox_user the infoblox user to connect with (can also use the infoblox:user pillar) infoblox_password the infoblox user's password (can also use the infolblox:password pillar) infoblox_api_version the infoblox api verison to use sslVerify should ssl verification be done on the connection to the Infoblox REST API CLI Example: .. 
code-block:: bash salt my-minion infoblox.delete_record some.dns.record MyInfobloxView A sslVerify=False ''' infoblox_server, infoblox_user, infoblox_password = _conn_info_check(infoblox_server, infoblox_user, infoblox_password) if infoblox_server is None and infoblox_user is None and infoblox_password is None: _throw_no_creds() return None currentRecords = get_record(name, record_type, infoblox_server, infoblox_user, infoblox_password, dns_view, infoblox_api_version, sslVerify) if currentRecords: for currentRecord in currentRecords: url = 'https://{0}/wapi/{1}/{2}'.format(infoblox_server, infoblox_api_version, currentRecord['Record ID']) ret = requests.delete(url, auth=(infoblox_user, infoblox_password), headers={'Content-Type': 'application/json'}, verify=sslVerify) if ret.status_code == 200: return True else: msg = 'Unsuccessful error code {0} returned -- full json dump {1}'.format(ret.status_code, ret.json()) raise CommandExecutionError(msg) return False def update_record(name, value, dns_view, record_type, infoblox_server=None, infoblox_user=None, infoblox_password=None, infoblox_api_version='v1.4.2', sslVerify=True): ''' update an entry to an infoblox dns view name the dns name value the value for the record record_type the record type (a, cname, etc) dns_view the DNS view to add the record to infoblox_server the infoblox server hostname (can also use the infoblox:server pillar) infoblox_user the infoblox user to connect with (can also use the infoblox:user pillar) infoblox_password the infoblox user's password (can also use the infolblox:password pillar) infoblox_api_version the infoblox api verison to use sslVerify should ssl verification be done on the connection to the Infoblox REST API CLI Example: .. code-block:: bash salt '*' infoblox.update_record alias.network.name canonical.network.name MyInfobloxView cname sslVerify=False ''' infoblox_server, infoblox_user, infoblox_password = _conn_info_check(infoblox_server, infoblox_user, infoblox_password) if infoblox_server is None and infoblox_user is None and infoblox_password is None: _throw_no_creds() return None currentRecords = get_record(name, record_type, infoblox_server, infoblox_user, infoblox_password, dns_view, infoblox_api_version, sslVerify) if currentRecords: for currentRecord in currentRecords: url = 'https://{0}/wapi/{1}/{2}'.format( infoblox_server, infoblox_api_version, currentRecord['Record ID']) data = None if record_type == 'cname': data = json.dumps({'canonical': value}) elif record_type == 'a': data = {'ipv4addrs': []} for i in value: data['ipv4addrs'].append({'ipv4addr': i}) data = json.dumps(data) ret = requests.put(url, data, auth=(infoblox_user, infoblox_password), headers={'Content-Type': 'application/json'}, verify=sslVerify) if ret.status_code == 200: return True else: msg = 'Unsuccessful status code {0} returned.'.format(ret.status_code) raise CommandExecutionError(msg) else: msg = 'Record {0} of type {1} was not found'.format(name, record_type) log.error(msg) return False def add_record(name, value, record_type, dns_view, infoblox_server=None, infoblox_user=None, infoblox_password=None, infoblox_api_version='v1.4.2', sslVerify=True): ''' add a record to an infoblox dns view name the record name value the value for the entry can make use of infoblox functions for next available IP, like 'func:nextavailableip:10.1.0.0/24' record_type the record type (cname, a, host, etc) dns_view the DNS view to add the record to infoblox_server the infoblox server hostname (can also use the infoblox:server pillar) 
infoblox_user the infoblox user to connect with (can also use the infoblox:user pillar) infoblox_password the infoblox user's password (can also use the infolblox:password pillar) infoblox_api_version the infoblox api verison to use sslVerify should ssl verification be done on the connection to the Infoblox REST API CLI Example: .. code-block:: bash salt 'myminion' infoblox.add_record alias.network.name canonical.network.name MyView ''' infoblox_server, infoblox_user, infoblox_password = _conn_info_check(infoblox_server, infoblox_user, infoblox_password) if infoblox_server is None and infoblox_user is None and infoblox_password is None: _throw_no_creds() return None record_type = record_type.lower() data = None url = None if record_type == 'cname': data = json.dumps({'name': name, 'canonical': value, 'view': dns_view}) if record_type == 'host' or record_type == 'a': data = json.dumps({'name': name, 'ipv4addrs': [{'ipv4addr': value}], 'view': dns_view}) #if record_type == 'alias': # data = json.dumps({'name': name, 'aliases': [value], 'view': dns_view}) # record_type = 'host' # tRec = get_record(name, # record_type, # infoblox_server, # infoblox_user, # infoblox_password, # dns_view, # infoblox_api_version, # sslVerify) # if not tRec: # log.error('A host record matching {0} was not found to add the alias to.'.format(name)) # return False # else: # for _rec in tRec: # url = 'https://{0}/wapi/{1}/{2}'.format( # infoblox_server, # infoblox_api_version, # _rec['Record ID']) url = 'https://{0}/wapi/{1}/record:{2}'.format(infoblox_server, infoblox_api_version, record_type) ret = requests.post(url, data, auth=(infoblox_user, infoblox_password), headers={'Content-Type': 'application/json'}, verify=sslVerify) if ret.status_code == 201: return True else: msg = 'Unsuccessful error code {0} returned -- full json dump {1}'.format(ret.status_code, ret.json()) raise CommandExecutionError(msg) def _throw_no_creds(): ''' helper function to log no credentials found error ''' msg = 'An infoblox server, username, and password must be specified or configured via pillar' raise SaltInvocationError(msg) def get_network(network_name, network_view=None, infoblox_server=None, infoblox_user=None, infoblox_password=None, infoblox_api_version='v1.4.2', sslVerify=True): ''' get a network from infoblox network_name The name of the network in IPAM network_view The name of the network view the network belongs to infoblox_server the infoblox server hostname (can also use the infoblox:server pillar) infoblox_user the infoblox user to connect with (can also use the infoblox:user pillar) infoblox_password the infoblox user's password (can also use the infolblox:password pillar) infoblox_api_version the infoblox api verison to use sslVerify should ssl verification be done on the connection to the Infoblox REST API CLI Example: .. 
code-block:: bash salt myminion infoblox.get_network '10.0.0.0/8' ''' records = [] infoblox_server, infoblox_user, infoblox_password = _conn_info_check(infoblox_server, infoblox_user, infoblox_password) if infoblox_server is None and infoblox_user is None and infoblox_password is None: _throw_no_creds() return None url = 'https://{0}/wapi/{1}/network?network={2}{3}'.format( infoblox_server, infoblox_api_version, network_name, ('' if network_view is None else '&network_view=' + network_view)) log.debug('Requst url is "{0}"'.format(url)) ret = _process_return_data(requests.get(url, auth=(infoblox_user, infoblox_password), verify=sslVerify)) if ret: for entry in ret.json(): log.debug('Infoblox record returned: {0}'.format(entry)) tEntry = {} data = _parse_record_data(entry) for key in data.keys(): tEntry[key] = data[key] records.append(tEntry) return records else: return False return False def get_record(record_name, record_type='host', infoblox_server=None, infoblox_user=None, infoblox_password=None, dns_view=None, infoblox_api_version='v1.4.2', sslVerify=True): ''' get a record from infoblox record_name name of the record to search for record_type type of reacord to search for (host, cname, a, etc...defaults to host) infoblox_server the infoblox server hostname (can also use the infoblox:server pillar) infoblox_user the infoblox user to connect with (can also use the infoblox:user pillar) infoblox_password the infoblox user's password (can also use the infolblox:password pillar) dns_view the infoblox DNS view to search, if not specified all views are searched infoblox_api_version the infoblox api verison to use sslVerify should ssl verification be done on the connection to the Infoblox REST API CLI Example: .. code-block:: bash salt myminion infoblox.get_record some.host.com A sslVerify=False ''' #TODO - verify record type (A, AAAA, CNAME< HOST, MX, PTR, SVR, TXT, host_ipv4addr, host_ipv6addr, naptr) records = [] infoblox_server, infoblox_user, infoblox_password = _conn_info_check(infoblox_server, infoblox_user, infoblox_password) if infoblox_server is None and infoblox_user is None and infoblox_password is None: _throw_no_creds() return None url = 'https://{0}/wapi/{1}/record:{3}?name:={2}{4}{5}'.format( infoblox_server, infoblox_api_version, record_name, record_type, ('' if dns_view is None else '&view=' + dns_view), ('&_return_fields%2B=aliases' if record_type == 'host' else '') ) log.debug('Requst url is "{0}"'.format(url)) ret = _process_return_data(requests.get(url, auth=(infoblox_user, infoblox_password), verify=sslVerify)) if ret: for entry in ret.json(): log.debug('Infoblox record returned: {0}'.format(entry)) tEntry = {} data = _parse_record_data(entry) for key in data.keys(): tEntry[key] = data[key] records.append(tEntry) return records else: return False return False def _parse_record_data(entry_data): ''' returns the right value data we'd be interested in for the specified record type ''' ret = {} ipv4addrs = [] aliases = [] if 'canonical' in entry_data: ret['Canonical Name'] = entry_data['canonical'] if 'ipv4addrs' in entry_data: for ipaddrs in entry_data['ipv4addrs']: ipv4addrs.append(ipaddrs['ipv4addr']) ret['IP Addresses'] = ipv4addrs if 'aliases' in entry_data: for alias in entry_data['aliases']: aliases.append(alias) ret['Aliases'] = aliases if 'name' in entry_data: ret['Name'] = entry_data['name'] if 'view' in entry_data: ret['DNS View'] = entry_data['view'] if 'network_view' in entry_data: ret['Network View'] = entry_data['network_view'] if 'comment' in entry_data: 
ret['Comment'] = entry_data['comment'] if 'network' in entry_data: ret['Network'] = entry_data['network'] if '_ref' in entry_data: ret['Record ID'] = entry_data['_ref'] return ret
python
import numpy as np
import streamlit as st
import pandas as pd

from builder.helpers import *
from builder.portfolio_builder import PortfolioBuilder


def app():
    model = st.container()
    pb0 = PortfolioBuilder(probability_weighted=False).init_data()

    with model:
        st.header("Original model presented by Bloomberg (2020)")
        st.markdown("The proposed machine learning algorithm for this task is XGBoost as it is a high performing model and [it can handle missing values without preprocessing](https://xgboost.readthedocs.io/en/latest/faq.html).")
        #st.markdown("For a given year Y and a given company, the label is the class computed for Y+1. \n The classes are built using the annual returns of stocks in excess of Eurostoxx 600 returns. Excess returns above +18% are classified as 'long', \n those between +18% and -15% are classified as 'omit' and those below -12% are put in the 'short' class. ")
        st.markdown("In the original paper, 4 annual portfolios are built for 2014, 2015, 2016 and 2017. \n For each year, the model is trained on the 7 previous years.\n Depending on the class predicted by the model, a position is taken in each stock. \n In the original model, _each stock in the same class is given the same weight_.")

        st.subheader("Choose model hyperparameters:")
        col_1, col_2, col_3 = st.columns(3)
        year = col_1.selectbox("Choose year of interest for annual portfolio:", list(range(2014, 2018)), index=3, key='model')
        n_estimators = col_2.slider("Choose number of trees in XGBoost model:", min_value=100, max_value=250, value=200, step=10)
        max_depth = col_3.slider("Choose maximum depth of trees in XGBoost model", min_value=3, max_value=10, value=5, step=1)

        params = update_params(n_estimators=n_estimators, max_depth=max_depth)
        pb1 = PortfolioBuilder(probability_weighted=False).init_data().fit_portfolio(year=year, xgb_params=params)

        st.write('## Results')
        st.subheader("Portfolio Weights:")
        dict_weights = pb1.get_dict_weights()
        #print(dict_weights.keys())
        st.write(pd.DataFrame(np.array([list(dict_weights.values())]), columns=list(dict_weights.keys()), index=['Weight']))

        st.subheader("Results from original XGBoost model:")
        col_1a, col_2a, col_3a = st.columns(3)
        col_1a.markdown("**Model accuracy (%):**")
        col_1a.write(round(pb1.get_model_accuracy() * 100, 2))
        col_2a.markdown("**Portfolio return:**")
        col_2a.write(round(pb1.get_portfolio_return(), 4))
        col_3a.markdown("**Portfolio Sharpe Ratio:**")
        col_3a.write(round(pb1.get_portfolio_sharpe_ratio(), 4))
        col_2a.text("Benchmark return:")
        col_2a.write(round(pb1.get_benchmark_return(), 4))
        col_3a.text("Benchmark Sharpe Ratio:")
        col_3a.write(round(pb1.get_benchmark_sharpe_ratio(), 4))
python
keyboard.send_key("<left>")
python
from __future__ import print_function

x = 42
print("Hello, World")
python
# -*- coding: utf-8 -*- # # Copyright nexB Inc. and others. All rights reserved. # http://nexb.com and https://github.com/nexB/scancode-toolkit/ # The ScanCode software is licensed under the Apache License version 2.0. # Data generated with ScanCode require an acknowledgment. # ScanCode is a trademark of nexB Inc. # # You may not use this software except in compliance with the License. # You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # # When you publish or redistribute any data created with ScanCode or any ScanCode # derivative work, you must accompany this data with the following acknowledgment: # # Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES # OR CONDITIONS OF ANY KIND, either express or implied. No content created from # ScanCode should be considered or used as legal advice. Consult an Attorney # for any legal advice. # ScanCode is a free software code scanning tool from nexB Inc. and others. # Visit https://github.com/nexB/scancode-toolkit/ for support and download. from __future__ import absolute_import from __future__ import print_function import argparse from fnmatch import fnmatchcase import os from shutil import copy from subprocess import run import sys import tempfile from commoncode.fileutils import resource_iter python_version = str(sys.version_info[0]) + str(sys.version_info[1]) py_abi = '{0}cp{1}{0}'.format('*', python_version) def generate_req_text(find_links, req_file, package_name=None, upgrade=False): """ Generate a requirement file as `req_file` of all dependencies wheels and sdists present at the find_links.If a `package_name` is provided it will be updated to its latest version and if upgrade option is called,it will be updated all the wheels to the latest version. """ thirdparty = resource_iter(find_links, with_dirs=False) dependencies = [ files for files in thirdparty if fnmatchcase(files, '*py3*') or fnmatchcase(files, py_abi) or ( fnmatchcase(files, '*tar.gz*') and not fnmatchcase(files, '*py2-ipaddress-3.4.1.tar.gz*') ) ] with tempfile.TemporaryDirectory() as temp_dir: for deps in dependencies: copy(deps, temp_dir) pip_args = [ 'pip-compile', '--generate-hashes', '--find-links', temp_dir, '--output-file', req_file, '--allow-unsafe', '--pip-args', '--no-index', ] if upgrade: pip_args.append('--upgrade') if package_name: pip_args.extend(['--upgrade-package', package_name]) run(pip_args) def main_with_args(args: str) -> None: parser = argparse.ArgumentParser( description="""Generate a requirement file as `requirement` of all dependencies wheels and sdists present at the find_links.If a `upgrade-package` option is called it will update provided `package_name` to its latest version and if upgrade option is called,it will be update all the wheels/sdist to the latest version. 
""", formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument( '--find-links', help='Required: Look for archives in this directory or on this HTML page', type=str, required=True, ) parser.add_argument( '--requirement', help='Required: Requirement file name.', type=str, required=True, ) parser.add_argument( '--upgrade', help='Optional: Try to upgrade all dependencies to their latest versions', action='store_true', ) parser.add_argument( '--upgrade-package', help='Optional: Specify particular packages to upgrade.', type=str, default=None, ) args = parser.parse_args() find_links = args.find_links requirement = args.requirement upgrade_package = args.upgrade_package or None upgrade = args.upgrade or False generate_req_text( find_links=find_links, req_file=requirement, package_name=upgrade_package, upgrade=upgrade, ) def main() -> None: main_with_args(sys.argv[1:]) if __name__ == '__main__': main()
python
# -*- coding: utf-8 -*-
from . import misc, excepts
from .compat import unicode, bool_compat


@bool_compat
class FD(object):
    TAGS = {
        # tag: (value type, whether the value must be padded to the maximum length, maximum length)
        # buyer's phone number or e-mail address
        1008: (unicode, False, 64)
    }
    CAST = {
        unicode: lambda x: x.encode('cp866')
    }
    LEN = {
        bytes: (len, lambda value, len_: value.ljust(len_))
    }

    def __init__(self, tags=None):
        """
        Structure for working with fiscal data.

        :type tags: dict
        :param tags: dictionary {tag: value}
        """
        self.data = {}
        self.b_data = bytearray()

        tags = tags or {}
        for item in tags.items():
            self.set_value(*item)

    def set_value(self, tag, value):
        """
        Set the value for a tag.

        :type tag: int
        :param tag: tag
        :param value: tag value
        """
        try:
            type_, len_req, len_max = self.TAGS.get(tag)
        except TypeError:
            raise excepts.FDError(u'Tag {} is not supported'.format(tag))

        value_type = type(value)
        if value_type != type_:
            raise excepts.FDError(
                u'Value for tag {} must be {}, got {}'.format(tag, type_, value_type)
            )

        cast_call = self.CAST.get(value_type)
        if cast_call:
            value = cast_call(value)

        value_type = type(value)
        len_call, fill_call = self.LEN[value_type]

        if len_call(value) > len_max:
            raise excepts.FDError(u'Tag {} is limited to {} bytes'.format(tag, len_max))

        if len_req:
            value = fill_call(value, len_max)

        value_len = len_call(value)
        if not value_len:
            return

        self.data[tag] = value
        self.b_data.extend(
            misc.bytearray_concat(
                misc.CAST_SIZE['2'](tag),
                misc.CAST_SIZE['2'](len_call(value)),
                value
            )
        )

    def dump(self):
        """
        Get the TLV structure, ready to be passed to the send_tlv_struct command.

        :rtype: bytes
        :return: tlv string
        """
        return bytes(self.b_data)

    def __bool__(self):
        return bool(self.data)
python
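A short usage sketch for the FD container above; the e-mail value is illustrative, and tag 1008 is the buyer phone/e-mail tag declared in TAGS.

# Hypothetical usage of FD (value is illustrative).
fd = FD(tags={1008: u'user@example.com'})
tlv_bytes = fd.dump()   # bytes ready to pass to send_tlv_struct
assert bool(fd)         # __bool__ reflects whether any tag was set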
# Copyright 2017 The Bazel Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utility functions for working with strings, lists, and files in Skylark.""" def full_label(lbl): """Converts a label to full format, e.g. //a/b/c -> //a/b/c:c. If the label is already in full format, it returns it as it is, otherwise appends the folder name as the target name. Args: lbl: The label to convert to full format. Returns: The label in full format, or the original input if it was already in full format. """ if lbl.find(":") != -1: return lbl target_name = lbl.rpartition("/")[-1] return lbl + ":" + target_name def group_files_by_directory(files, extensions, attr): """Groups files based on their containing directories. This function examines each file in |files| and looks for a containing directory with the given extension. It then returns a dictionary that maps the directory names to the files they contain. For example, if you had the following files: - some/path/foo.images/bar.png - some/path/foo.images/baz.png - some/path/quux.images/blorp.png Then passing the extension "images" to this function would return: { "some/path/foo.images": depset([ "some/path/foo.images/bar.png", "some/path/foo.images/baz.png" ]), "some/path/quux.images": depset([ "some/path/quux.images/blorp.png" ]) } If an input file does not have a containing directory with the given extension, the build will fail. Args: files: An iterable of File objects. extensions: The list of extensions of the containing directories to return. The extensions should NOT include the leading dot. attr: The attribute to associate with the build failure if the list of files has an element that is not in a directory with the given extension. Returns: A dictionary whose keys are directories with the given extension and their values are the sets of files within them. """ grouped_files = {} paths_not_matched = {} ext_info = [(".%s" % e, len(e) + 1) for e in extensions] for f in files: path = f.path not_matched = True for search_string, search_string_len in ext_info: # Make sure the matched string either has a '/' after it, or occurs at # the end of the string (this lets us match directories without requiring # a trailing slash but prevents matching something like '.xcdatamodeld' # when passing 'xcdatamodel'). The ordering of these checks is also # important, to ensure that we can handle cases that occur when working # with common Apple file structures, like passing 'xcdatamodel' and # correctly parsing paths matching 'foo.xcdatamodeld/bar.xcdatamodel/...'. after_index = -1 index_with_slash = path.find(search_string + "/") if index_with_slash != -1: after_index = index_with_slash + search_string_len else: index_without_slash = path.find(search_string) after_index = index_without_slash + search_string_len # If the search string wasn't at the end of the string, it must have a # non-slash character after it (because we already checked the slash case # above), so eliminate it. 
if after_index != len(path): after_index = -1 if after_index != -1: not_matched = False container = path[:after_index] contained_files = grouped_files.setdefault(container, default = []) contained_files.append(f) # No need to check other extensions break if not_matched: paths_not_matched[path] = True if len(paths_not_matched): formatted_files = "[\n %s\n]" % ",\n ".join(paths_not_matched.keys()) fail("Expected only files inside directories named with the extensions " + "%r, but found: %s" % (extensions, formatted_files), attr) return {k: depset(v) for k, v in grouped_files.items()}
python
#!/usr/bin/env python

import time
import threading
import logging
import sys
import signal

import hollywood.actor
import hollywood.exceptions


# Clean shutdown with ctrl-c
def signal_handler(sig, frame):
    System.halt()
    sys.exit(1)

signal.signal(signal.SIGINT, signal_handler)


class System(object):

    addresses = {}
    processes = {}
    actor_lock = threading.RLock()

    @classmethod
    def spawn(cls, actor_class, *args, **kwargs):
        if actor_class in cls.addresses:
            # Fixed lookup: the class attribute is `addresses`, not `address`.
            return cls.addresses[actor_class]
        actor = actor_class(*args, **kwargs)
        cls.processes[actor.address.name] = actor
        cls.addresses[actor_class] = actor.address
        return actor.address

    @classmethod
    def halt(cls):
        logging.warning("Shutdown sequence initiated.")
        with cls.actor_lock:
            # Copy the keys so entries can be deleted while iterating.
            address_list = list(cls.processes.keys())
            for address in address_list:
                logging.info("Halting: %s", address)
                cls.processes[address].stop()
                del cls.processes[address]

        while threading.active_count() > 1:
            for thread in threading.enumerate():
                logging.warning("Actor blocking termination: %s", thread.name)
            time.sleep(1)
        logging.warning("Shutdown complete.")

    @classmethod
    def alive(cls):
        return len(cls.processes)
python
import random import time try: from colorama import init, Fore, Back init(autoreset=True) blue = Fore.LIGHTCYAN_EX red = Fore.LIGHTRED_EX green = Fore.GREEN res = Fore.RESET except: if (int(input("\nYou don't have colorama installed, do you want to install it? (Type 1 if you do): "))==1): try: import pip pip.main(['install','colorama']) from colorama import init, Fore, Back init(autoreset=True) blue = Fore.LIGHTCYAN_EX red = Fore.LIGHTRED_EX green = Fore.GREEN res = Fore.RESET except: blue = red = green = res = "" else: blue = red = green = res = "" ################################################################################## # https://www.activestate.com/resources/quick-reads/how-to-install-python-packages-using-a-script/ # pyinstaller --onefile main.py ################################################################################## alpha = "abcdefghijklmnopqrstuvwyz" ################################################################################## def start(): global land, size, visLand, mines, difficulty #----------------------------------------------------------------------------- size = int(input("\nSize (e.g.: 5): ")) #----------------------------------------------------------------------------- land = [] visLand = [] # visible land for x in range(size): land.append([None]*size) visLand.append(["�"]*size) #----------------------------------------------------------------------------- difficulty = input("Difficulty (e-Easy, m-Medium, h-Hard, g-God): ").lower() if (difficulty == "e"): mines = int(0.2*size**2) elif (difficulty == "m"): mines = int(0.4*size**2) elif (difficulty == "h"): mines = int(0.6*size**2) elif (difficulty == "g"): mines = int(0.8*size**2) #----------------------------------------------------------------------------- minesLoc = [] for x in range(mines): r = random.randint(0,size**2-1) if (r not in minesLoc): minesLoc.append(r) for i in range(len(minesLoc)): x = minesLoc[i]//size # 11//4 = 2 y = minesLoc[i]%size # 11%4 = 3 land[x][y] = "⨀" #----------------------------------------------------------------------------- fillLand() playGame() ################################################################################## def visualize(land,size): vis = "\n " for x in range(size): vis += " " + alpha[x] + " " vis += " \n" for x in range(size): vis += alpha[x].upper() + " |" for y in range(size): if (str(land[x][y]) == "�"): vis += (" " + str(land[x][y]) + " ") else: vis += (" " + blue+str(land[x][y]) + res + " ") vis += "|\n" print(vis) ################################################################################## def fillLand(): for i in range(size): for j in range(size): nMines = 0 if (not land[i][j]): # If there isn't a mine if (j==0): if (land[i][j+1]=="⨀"): nMines += 1 elif (j==size-1): if (land[i][j-1]=="⨀"): nMines += 1 else: if (land[i][j+1]=="⨀"): nMines += 1 if (land[i][j-1]=="⨀"): nMines += 1 if (i==0): if (land[i+1][j]=="⨀"): nMines += 1 if (j==0): if (land[i+1][j+1]=="⨀"): nMines += 1 elif (j==size-1): if (land[i+1][j-1]=="⨀"): nMines += 1 else: if (land[i+1][j+1]=="⨀"): nMines += 1 if (land[i+1][j-1]=="⨀"): nMines += 1 elif (i==size-1): if (land[i-1][j]=="⨀"): nMines += 1 if (j==0): if (land[i-1][j+1]=="⨀"): nMines += 1 elif (j==size-1): if (land[i-1][j-1]=="⨀"): nMines += 1 else: if (land[i-1][j+1]=="⨀"): nMines += 1 if (land[i-1][j-1]=="⨀"): nMines += 1 else: if (land[i+1][j]=="⨀"): nMines += 1 if (land[i-1][j]=="⨀"): nMines += 1 if (j==0): if (land[i+1][j+1]=="⨀"): nMines += 1 if (land[i-1][j+1]=="⨀"): nMines += 1 elif (j==size-1): if 
(land[i+1][j-1]=="⨀"): nMines += 1 if (land[i-1][j-1]=="⨀"): nMines += 1 else: if (land[i+1][j+1]=="⨀"): nMines += 1 if (land[i-1][j+1]=="⨀"): nMines += 1 if (land[i+1][j-1]=="⨀"): nMines += 1 if (land[i-1][j-1]=="⨀"): nMines += 1 land[i][j] = nMines ################################################################################## def playGame(): play = True while play: visualize(visLand,size) print("\nThere are "+blue+str(mines)+res+" mines\n") while True: loc = input("What location do you want to choose? (e.g. Aa): ").lower() loc_x = alpha.index(loc[0]) loc_y = alpha.index(loc[1]) if (loc_x < size and loc_x >=0 and loc_y < size and loc_y >= 0): break else: print("\nInvalid location!\n") if (visLand[loc_x][loc_y] == "�"): if (land[loc_x][loc_y] == "⨀"): play = False print("\n"+red+"YOU LOST \n") else: visLand[loc_x][loc_y] = land[loc_x][loc_y] else: print("\nYou've already chosen that location!\n") locLeft = 0 for x in range(size): for y in range(size): if (visLand[x][y] == "�"): locLeft += 1 if (locLeft == mines): visualize(visLand,size) print("\n"+green+"YOU WON!\n") play = False ################################################################################## while True: t1 = round(time.time() * 1000) start() t2 = round(time.time() * 1000) if (difficulty=="e"): diff = "EASY" elif (difficulty=="m"): diff = "MEDIUM" elif (difficulty=="h"): diff = "HARD" elif (difficulty=="g"): diff = "GOD" print("Finished a "+str(size)+"x"+str(size)+" "+diff+" game in "+str(round((t2-t1)/1000,2))+" seconds") r = input("\nType 1 to play again: ") if (r != "1"): break
python
from rxbp.init.initsubscriber import init_subscriber
from rxbp.init.initsubscription import init_subscription
from rxbp.mixins.flowablemixin import FlowableMixin
from rxbp.observable import Observable
from rxbp.observerinfo import ObserverInfo
from rxbp.scheduler import Scheduler
from rxbp.schedulers.trampolinescheduler import TrampolineScheduler
from rxbp.subscriber import Subscriber


class SubscribeOnFlowable(FlowableMixin):
    def __init__(self, source: FlowableMixin, scheduler: Scheduler = None):
        super().__init__()

        self._source = source
        self._scheduler = scheduler

    def unsafe_subscribe(self, subscriber: Subscriber):
        scheduler = self._scheduler or TrampolineScheduler()

        updated_subscriber = init_subscriber(
            scheduler=subscriber.scheduler,
            subscribe_scheduler=scheduler,
        )
        subscription = self._source.unsafe_subscribe(updated_subscriber)

        class SubscribeOnObservable(Observable):
            def observe(_, observer_info: ObserverInfo):
                def action(_, __):
                    return subscription.observable.observe(observer_info)

                disposable = scheduler.schedule(action)
                return disposable

        observable = SubscribeOnObservable()

        return init_subscription(observable=observable)
python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
"""
This module provides routines to assist display of cellular automata in
textual form.
"""

from collections import defaultdict

DEFAULT_PALETTE = defaultdict(lambda: u'●', {0: u' '})

FOREST_PALETTE = defaultdict(
    lambda: u'●',  # default
    {
        0: u' ',
        1: u'\U0001F332',  # evergreen tree
        2: u'\U0001f525',  # fire
        3: u'\U0001F342',  # falling leaves
    }
)


def automaton_to_text(automaton, palette=DEFAULT_PALETTE):
    """ Generate a text representation of the automaton states.

    Parameters
    ----------
    automaton : CellularAutomaton instance
        The automaton to render.
    palette : mapping
        A mapping from state value to the symbol used to represent
        that state.

    Returns
    -------
    text : str
        The textual representation of the state of the automaton.
    """
    states = automaton.states
    joiners = [u'']
    if states.ndim >= 2:
        joiners = [u'\n'] + joiners
    if states.ndim >= 3:
        joiners = [u'\n\n\n'] * (states.ndim - 2) + joiners
    return _render_states(states, palette, joiners)


def _render_states(states, palette, joiners):
    """ Recursively render dimensions of the states, joining with next joiner. """
    joiner = joiners[0]
    if len(joiners) == 1:
        parts = (palette[state] for state in states)
    else:
        # Pass the palette through to the recursive call (missing in the original).
        parts = (_render_states(sheet, palette, joiners[1:]) for sheet in states)
    return joiner.join(parts)
python
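A small sketch of rendering a 2-D state array with the forest palette; the stand-in automaton class below only exists to supply the `.states` array that the docstring says a CellularAutomaton provides.

# Hypothetical usage: render a 2-D automaton state array with FOREST_PALETTE.
import numpy as np

class FakeAutomaton(object):
    # Stand-in for a CellularAutomaton instance; only `.states` is needed here.
    states = np.array([[0, 1, 1],
                       [2, 0, 3]])

print(automaton_to_text(FakeAutomaton(), palette=FOREST_PALETTE))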
import scancel
import sys

if __name__ == "__main__":
    scancel.main(sys.argv)
python
#!/usr/bin/python """Command set for the Onkyo TX-NR708. This file was automatically created by raw_commands_massager.py from the source file: onkyo_raw_commands.txt Each command group in the documentation has a seperate list, and all commands are available in ALL.""" ###################### ### Power ###################### POWER = [ ("Power ON", "PWR01"), ("Power OFF", "PWR00"), ] ###################### ### Audio ###################### AUDIO = [ ("Mute", "AMT01"), ("UnMute", "AMT00"), ("Volume Up", "MVLUP"), ("Volume Down", "MVLDOWN"), ] ###################### ### Source Select ###################### SOURCE_SELECT = [ ("VIDEO1 VCR/DVR", "SLI00"), ("VIDEO2 CBL/SAT", "SLI01"), ("Game", "SLI02"), ("Auxiliary", "SLI03"), ("VIDEO5 AUX2", "SLI04"), ("Computer/PC", "SLI05"), ("VIDEO6", "SLI05"), ("VIDEO7", "SLI06"), ("BD/DVD", "SLI10"), ("TAPE(1)", "SLI20"), ("TAPE2", "SLI21"), ("PHONO", "SLI22"), ("CD", "SLI23"), ("FM", "SLI24"), ("AM", "SLI25"), ("TUNER", "SLI26"), ("MUSIC SERVER", "SLI27"), ("INTERNET RADIO", "SLI28"), ("USB", "SLI29"), ("MULTI CH", "SLI30"), ("XM*1", "SLI31"), ("SIRIUS*1", "SLI32"), ("Selector Position Wrap-Around Up", "SLIUP"), ("Selector Position Wrap-Around Down", "SLIDOWN"), ] ###################### ### Speaker AB Control ###################### SPEAKER_AB_CONTROL = [ ("Speaker A Off", "SPA00"), ("Speaker A On", "SPA01"), ("Speaker B Off", "SPB00"), ("Speaker B On", "SPB01"), ] ###################### ### Sound modes ###################### SOUND_MODES = [ ("STEREO", "LMD00"), ("DIRECT", "LMD01"), ("SURROUND", "LMD02"), ("FILM", "LMD03"), ("THX", "LMD04"), ("ACTION", "LMD05"), ("MUSICAL", "LMD06"), ("MONO MOVIE", "LMD07"), ("ORCHESTRA", "LMD08"), ("UNPLUGGED", "LMD09"), ("STUDIO-MIX", "LMD0A"), ("TV LOGIC", "LMD0B"), ("ALL CH STEREO", "LMD0C"), ("THEATER-DIMENSIONAL", "LMD0D"), ("ENHANCED 7/ENHANCE", "LMD0E"), ("MONO", "LMD0F"), ("PURE AUDIO", "LMD11"), ("MULTIPLEX", "LMD12"), ("FULL MONO", "LMD13"), ("DOLBY VIRTUAL", "LMD14"), ("5.1ch Surround", "LMD40"), ("Straight Decode*1", "LMD40"), ("Dolby EX/DTS ES", "LMD41"), ("Dolby EX*2", "LMD41"), ("THX Cinema", "LMD42"), ("THX Surround EX", "LMD43"), ("U2/S2 Cinema/Cinema2", "LMD50"), ("MusicMode", "LMD51"), ("Games Mode", "LMD52"), ("PLII/PLIIx Movie", "LMD80"), ("PLII/PLIIx Music", "LMD81"), ("Neo6 Cinema", "LMD82"), ("Neo6 Music", "LMD83"), ("PLII/PLIIx THX Cinema", "LMD84"), ("Neo6 THX Cinema", "LMD85"), ("PLII/PLIIx Game", "LMD86"), ("Neural Surr*3", "LMD87"), ("Neural THX", "LMD88"), ("PLII THX Games", "LMD89"), ("Neo6 THX Games", "LMD8A"), ("Listening Mode Wrap-Around Up", "LMDUP"), ("Listening Mode Wrap-Around Down", "LMDDOWN"), ] ###################### ### OSD ###################### OSD = [ ("OSD Up", "OSDUP"), ("OSD Down", "OSDDOWN"), ("OSD Right", "OSDRIGHT"), ("OSD Left", "OSDLEFT"), ("OSD Enter", "OSDENTER"), ("OSD Home", "OSDHOME"), ("OSD Menu", "OSDMENU"), ("OSD Exit", "OSDEXIT"), ("Display", "DIFTG"), ] ALL = POWER + AUDIO + SOURCE_SELECT + SPEAKER_AB_CONTROL + SOUND_MODES + OSD
python
import cProfile

import palingrams_optimized

cProfile.run('palingrams_optimized.find_palingrams()')
python
from setuptools import setup

setup(
    name="horsephrase",
    version="0.6.0",
    description="Secure password generator.",
    long_description=(
        "Like http://correcthorsebatterystaple.net/ except it's not a web page"
        " which is logging your passwords and sending them all to the NSA."
    ),
    author="Glyph",
    author_email="glyph@twistedmatrix.com",
    maintainer="Glyph",
    maintainer_email="glyph@twistedmatrix.com",
    url="https://github.com/glyph/horsephrase/",
    packages=["horsephrase"],
    package_data=dict(
        horsephrase=["*.txt"],
    ),
    install_requires=['six==1.11.0'],
    license="MIT",
    classifiers=[
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
    ],
    entry_points={
        "console_scripts": [
            "horsephrase = horsephrase.__main__:main",
        ],
    },
    extras_require={
        ':python_version == "2.7"': ['mock'],
        'dev': ['requests'],
    },
)
python
from core.errors import ANCCError


class ParseError(ANCCError):
    def __init__(self, lookahead_literal, non_terminal, *args):
        super().__init__(*args)
        self.lookahead_literal = lookahead_literal
        self.non_terminal = non_terminal

    def __str__(self):
        return super().__str__() + ", unexpected {} in {}".format(
            self.lookahead_literal.verbose_name,
            self.non_terminal.verbose_name,
        )
python
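A usage sketch for the exception above, assuming ANCCError behaves like a standard Exception; the `_Sym` helper is hypothetical and only provides the `verbose_name` attribute that `__str__` requires.

# Hypothetical usage of ParseError.
class _Sym(object):
    def __init__(self, verbose_name):
        self.verbose_name = verbose_name

err = ParseError(_Sym("')'"), _Sym('expression'), 'parse failed')
print(err)   # -> parse failed, unexpected ')' in expression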
# -*- coding: utf-8 -*- import os import time import argparse import os.path as osp import sys sys.path.append('.') import torch import torch.nn as nn import torch.nn.functional as F from torch.utils.data import DataLoader from torchvision.transforms import Compose from network.mainnetwork import VLFTrans from utils import AverageMeter from dataloader.vid_anchor_test import ReferDataset_VID as ReferDataset_test from dataloader.vid_anchor_test import test_collate_fn from utils.transforms import Resize, ToTensor, Normalize import numpy as np import random from dist_utils import * from general_util import * parser = argparse.ArgumentParser( description='Locater evaluation routine') def load_args(parser): parser.add_argument('--data-root', type=str, default='./datasets/') parser.add_argument('--snapshot', default=None) # parser.add_argument('--local_rank', type=int, default=0) parser.add_argument('-j', '--workers', default=16, type=int, metavar='N', help='number of data loading workers (default: 16)') # Training procedure settings parser.add_argument('--no-cuda', action='store_true', default=False, help='Do not use cuda to train model') parser.add_argument('--log-interval', type=int, default=200, metavar='N', help='report interval') parser.add_argument('--no-pin-memory', default=False, action='store_true', help='enable CUDA memory pin on DataLoader') # Model settings parser.add_argument('--size', default=320, type=int, help='image size') parser.add_argument("--in-chans", default=3, type=int) parser.add_argument('--N1', default=3, type=int) parser.add_argument('--N1_test', default=-1, type=int) # * for testing (temp, spat, mul) parser.add_argument('--dataset', default='A2D', type=str) parser.add_argument('--testing-type', default='NORM', type=str) return parser parser = load_args(parser) args = parser.parse_args() args.local_rank = int(os.environ["LOCAL_RANK"]) if args.N1_test == -1: args.N1_test = args.N1 args.distributed = False if 'WORLD_SIZE' in os.environ: args.distributed = int(os.environ['WORLD_SIZE']) > 1 sync_print('Use distributed method', args) args.world_size = 1 if args.distributed: torch.cuda.set_device(args.local_rank) torch.distributed.init_process_group(backend='nccl', init_method='env://') args.world_size = torch.distributed.get_world_size() args_dict = vars(args) args.cuda = not args.no_cuda and torch.cuda.is_available() image_size = (args.size, args.size) input_transform_val = Compose([ ToTensor(), Resize(image_size, test=True), Normalize( mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]) ]) refer_val = ReferDataset_test(dataset_root=args.data_root, transform=input_transform_val, N1=args.N1_test, dataset=args.dataset, testing_type=args.testing_type) val_sampler = None if args.distributed: val_sampler = torch.utils.data.distributed.DistributedSampler(refer_val) val_loader = DataLoader(refer_val, batch_size=1, pin_memory=(not args.no_pin_memory), shuffle=False, sampler=val_sampler, num_workers=args.workers, collate_fn=test_collate_fn ) sync_print('Dataset loaded', args) net = VLFTrans(img_dim=args.size, in_chans=args.in_chans) assert osp.exists(args.snapshot) sync_print('Loading state dict from: {0}'.format(args.snapshot), args) snapshot_dict = torch.load(args.snapshot, map_location='cpu') net.load_state_dict(snapshot_dict) if args.distributed: net = torch.nn.SyncBatchNorm.convert_sync_batchnorm(net).cuda() net = torch.nn.parallel.DistributedDataParallel( net, find_unused_parameters=True, device_ids=[args.local_rank], output_device=args.local_rank ) else: net = net.cuda() 
sync_print('Argument list to program', args) sync_print('\n'.join(['--{0} {1}'.format(arg, args_dict[arg]) for arg in args_dict]), args) sync_print('\n\n', args) def compute_mask_IU(masks, target, only_label=False): assert(target.shape[-2:] == masks.shape[-2:]) temp = (masks * target) intersection = temp.sum() if only_label: union = target.sum() else: union = ((masks + target) - temp).sum() return intersection, union def evaluate(): net.eval() save_count = 0 with torch.no_grad(): eval_seg_iou_list = [.5, .6, .7, .8, .9] cum_I = 0 cum_U = 0 meaniou = 0 seg_correct = torch.zeros(len(eval_seg_iou_list),1).cuda().squeeze() seg_total = torch.tensor([0.]).cuda() start_time = time.time() for seq_idx, (seq_dataset, global_images, words) in enumerate(val_loader): if seq_idx % (args.log_interval//args.world_size) == 0 or batch_idx == (len(val_loader) - 1): sync_print('Evaluating [{}+{}] {}/{} sequence....'.format(seq_dataset.seq_name, str(seq_dataset.obj_n), int(seq_idx),len(refer_val)//args.world_size), args) seq_dataloader=DataLoader(seq_dataset, batch_size=1, shuffle=False, num_workers=args.workers//args.world_size, pin_memory=True) if args.distributed: net.module._reset_memory() else: net._reset_memory() # * process global feature if args.cuda: global_images = global_images.cuda() for key in words: words[key] = words[key].cuda() if args.distributed: net.module._prep_global_mem(global_images, words) else: net._prep_global_mem(global_images, words) # * valid_labels = seq_dataset.labels for batch_idx, (imgs, mask) in enumerate(seq_dataloader): if args.cuda: imgs = imgs.cuda() mask = mask.float().cuda() out_masks, _attns = net(vis=imgs, lang=words) # * example w/ ground-truth if mask.min() != -1.: out_mask = out_masks[-1] out = out_mask.squeeze() out = torch.sigmoid(out) out = out.unsqueeze(0).unsqueeze(0) out = F.interpolate( out, size=(mask.shape[-2], mask.shape[-1]), mode='bilinear', align_corners=True) mask = mask.squeeze() seg_total += 1 thresholded_out = (out > 0.5).float().data inter, union = compute_mask_IU(thresholded_out, mask) cum_I += inter cum_U += union if union == 0: iou = 1. else: iou = inter / union meaniou += iou for idx, seg_iou in enumerate(eval_seg_iou_list): seg_correct[idx] += (iou >= seg_iou) # Print final accumulated IoUs if args.distributed: seg_total = reduce_tensor(seg_total, args) seg_correct = reduce_tensor(seg_correct, args) meaniou = reduce_tensor(meaniou, args) cum_I = reduce_tensor(cum_I, args) cum_U = reduce_tensor(cum_U, args) overall = cum_I / cum_U mean = meaniou / seg_total if args.local_rank == 0: print('-' * 32) print('Precision@X') for idx, seg_iou in enumerate(eval_seg_iou_list): rep_idx = eval_seg_iou_list.index(eval_seg_iou_list[idx]) print('precision@{:s} = {:.5f}'.format( str(seg_iou), float(seg_correct[rep_idx] / seg_total))) print('-' * 32) print('mAP.5:.95 = {:.5f}'.format(float(torch.mean(seg_correct)) / float(seg_total))) print('-' * 32) # Print maximum IoU if args.local_rank == 0: print('Evaluation done. Elapsed time: {:.3f} (s) '.format( time.time() - start_time)) print('o-iou: {:<15.13f} | m-iou: {:<15.13f}'.format(float(overall), float(mean))) return float(overall), float(mean) if __name__ == '__main__': evaluate()
python
from typing import List

from ..regularization_operator import RegularizationOperator
from .block_operator import BlockOperator
from .null_operator import NullOperator


def make_block_operator(operator_list: List) -> RegularizationOperator:
    """
    Given a list of regularization operators, creates a block operator as a direct sum.

    :param operator_list:
    :return: The resulting operator might either be a :py:class:`BlockOperator`, or a
        :py:class:`NullOperator` if all operators in the list are instances of
        :py:class:`NullOperator`.
    """
    # Check if all operators in the list are null.
    all_null = True
    for op in operator_list:
        if not isinstance(op, NullOperator):
            all_null = False
    if all_null:
        # If yes, return a NullOperator of the right dimension.
        combined_dim = 0
        for op in operator_list:
            combined_dim += op.dim
        block_operator = NullOperator(combined_dim)
    else:
        # If not, return a BlockOperator.
        block_operator = BlockOperator(operator_list)
    return block_operator
python
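A sketch of the null-operator collapse described in the docstring; the dimensions are illustrative, and the only assumption beyond the snippet is that `NullOperator(dim)` exposes the `.dim` attribute the function itself reads.

# Combining only NullOperators yields a single NullOperator of the summed dimension.
ops = [NullOperator(3), NullOperator(5)]
combined = make_block_operator(ops)
assert isinstance(combined, NullOperator)
assert combined.dim == 8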
#!/usr/bin/env python2
from setuptools import setup, find_packages

setup(
    name='polyjit.buildbot',
    version='0.1',
    url='https://github.com/PolyJIT/buildbot',
    packages=find_packages(),
    install_requires=["buildbot>=0.9.7",
                      "buildbot-console-view",
                      "buildbot-waterfall-view",
                      "buildbot-www",
                      "treq"],
    author="Andreas Simbuerger",
    author_email="simbuerg@fim.uni-passau.de",
    description="Buildbot drivers.",
    license="MIT",
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Testing',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
    ],
    keywords="polyjit buildbot",
)
python
from django.contrib import admin

from apps.sistema.models import registro, compra, tarjetas

# Register your models here.
admin.site.register(registro)
admin.site.register(compra)
admin.site.register(tarjetas)
python
""" Fixer for dictcomp and setcomp: {foo comp_for} -> set((foo comp_for)) {foo:bar comp_for} -> dict(((foo, bar) comp_for))""" from lib2to3 import fixer_base from lib2to3.pytree import Node, Leaf from lib2to3.pygram import python_symbols as syms from lib2to3.pgen2 import token from lib2to3.fixer_util import parenthesize, Name, Call, LParen, RParen from ..fixer_util import commatize def tup(args): return parenthesize(Node(syms.testlist_gexp, commatize(args))) class FixDctsetcomp(fixer_base.BaseFix): PATTERN = """atom< '{' dictsetmaker< n1=any [col=':' n2=any] comp_for=comp_for< 'for' any 'in' any [comp_if<'if' any>] > > '}' >""" def transform(self, node, results): comp_for = results.get("comp_for").clone() is_dict = bool(results.get("col")) # is it a dict? n1 = results.get("n1").clone() if is_dict: n2 = results.get("n2").clone() n2.prefix = " " impl_assign = tup((n1, n2)) else: impl_assign = n1 our_gencomp = Node(syms.listmaker, [(impl_assign),(comp_for)]) if is_dict: new_node = Node(syms.power, [Name("dict"), parenthesize(Node(syms.atom, [our_gencomp]))]) else: new_node = Node(syms.power, [Name("set"), parenthesize(Node(syms.atom, [our_gencomp]))]) new_node.prefix = node.prefix return new_node
python
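The rewrite described in the fixer's docstring relies on a plain semantic equivalence between comprehensions and generator expressions passed to set()/dict(); a quick runnable check of that equivalence (this is not the fixer's own code):

pairs = [('a', 1), ('b', 2)]
assert {k: v for k, v in pairs} == dict(((k, v) for k, v in pairs))
assert {x * x for x in range(5)} == set((x * x for x in range(5)))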
'''Autogenerated by get_gl_extensions script, do not edit!'''
from OpenGL import platform as _p, constants as _cs, arrays
from OpenGL.GL import glget
import ctypes

EXTENSION_NAME = 'GL_ARB_map_buffer_range'


def _f(function):
    return _p.createFunction(function, _p.GL, 'GL_ARB_map_buffer_range', False)


_p.unpack_constants("""GL_MAP_READ_BIT 0x1
GL_MAP_WRITE_BIT 0x2
GL_MAP_INVALIDATE_RANGE_BIT 0x4
GL_MAP_INVALIDATE_BUFFER_BIT 0x8
GL_MAP_FLUSH_EXPLICIT_BIT 0x10
GL_MAP_UNSYNCHRONIZED_BIT 0x20""", globals())


@_f
@_p.types(ctypes.c_void_p, _cs.GLenum, _cs.GLintptr, _cs.GLsizeiptr, _cs.GLbitfield)
def glMapBufferRange(target, offset, length, access):
    pass


@_f
@_p.types(None, _cs.GLenum, _cs.GLintptr, _cs.GLsizeiptr)
def glFlushMappedBufferRange(target, offset, length):
    pass


def glInitMapBufferRangeARB():
    '''Return boolean indicating whether this extension is available'''
    from OpenGL import extensions
    return extensions.hasGLExtension(EXTENSION_NAME)
python
import numpy as np # Nonlinearity functions (Numpy implementation) nl_linear = lambda x: x nl_tanh = lambda x: np.tanh(x) nl_sigmoid = lambda x: 1./(1+np.exp(-x)) nl_rect = lambda x: np.clip(x, 0, np.inf) nl_shallow_rect = lambda x: np.clip(0.1*x, 0, np.inf) nl_clip = lambda x: np.clip(x, 0, 1) nl_softplus = lambda x: np.log(1. + np.exp(x)) #
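
# Added note (not in the original): np.exp(x) overflows for large positive x, so the
# softplus above can emit overflow warnings and return inf. np.logaddexp(0, x) computes
# log(1 + exp(x)) in a numerically stable way and could be used instead:
nl_softplus_stable = lambda x: np.logaddexp(0., x)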
python
from tensorhive.core.managers.TensorHiveManager import TensorHiveManager
from connexion import NoContent
from flask_jwt_extended import jwt_required


@jwt_required
def get_metrics(hostname: str, metric_type: str = None):
    try:
        infrastructure = TensorHiveManager().infrastructure_manager.infrastructure
        resource_data = infrastructure[hostname]['CPU']  # CPU metrics only, no GPU data here
        assert resource_data

        if metric_type is None:
            # Put all gathered metric data for each CPU
            result = {uuid: cpu_data['metrics'] for uuid, cpu_data in resource_data.items()}
        else:
            # Put only the requested metric data for each CPU
            result = {uuid: cpu_data['metrics'][metric_type] for uuid, cpu_data in resource_data.items()}
    except (KeyError, AssertionError):
        content, status = NoContent, 404
    else:
        content, status = result, 200
    finally:
        return content, status
python
import pandas as pd import numpy as np from sklearn.metrics import mean_squared_error from sklearn.model_selection import KFold def k_fold(n, value_est): kf = KFold(n_splits=5) def expend_feature_test(df): """ Return a dataframe with expension of sequence for test set prediction Args: df (Dataframe): same format as train Returns: sub_df: a dataframe with: number of rows = seq_scored columns name = [id, base, base_structure_type, base_predicted_loop_type] """ if (df.shape[0] != df.id.nunique()): print('repetition in RNA sequnence, clean dataframe first') return # dose the same as retunr None, which exit the function else: col_names = ['id','base', 'base_structure_type', 'base_predicted_loop_type'] #dataframe creation using list of lists # loop for each unique sequence sub_data = [] for row_i in df.index: #loop for the legth of sequnece score (trian length is different from test) serie_i = df.loc[row_i] #panda series object seq_length = serie_i['seq_length'] for seq_i in range (seq_length): seq_data = [serie_i['id'] + '_' + str(seq_i), serie_i['sequence'][seq_i], serie_i['structure'][seq_i], serie_i['predicted_loop_type'][seq_i]] sub_data.append(seq_data) sub_df = pd.DataFrame(sub_data, columns =col_names, dtype = float) return sub_df def fianle_transform_without_SN (dataframe, replace_type): # filter with SN_filter criteria #use expend to change feature data_filter_seq = expend_feature(dataframe) #make feature to integer for r_types in replace_type: data_filter_seq = data_filter_seq.replace(r_types) return data_filter_seq def expend_feature (df): """ Return a dataframe with expension of sequence Args: df (Dataframe): same format as train Returns: sub_df: a dataframe with: number of rows = seq_scored columns name:[id, base, base_structure_type, base_predicted_loop_type, reactivity_error, deg_error_Mg_pH10,deg_error_pH10, deg_error_Mg_50C, deg_error_50C, reactivity, deg_Mg_pH10, deg_pH10, deg_Mg_50C, deg_50C] """ if (df.shape[0] != df.id.nunique()): print('repetition in RNA sequnence, clean dataframe first') return # dose the same as retunr None, which exit the function else: col_names = ['id','base', 'base_structure_type', 'base_predicted_loop_type', 'reactivity_error', 'deg_error_Mg_pH10', 'deg_error_pH10', 'deg_error_Mg_50C', 'deg_error_50C', 'reactivity', 'deg_Mg_pH10', 'deg_pH10', 'deg_Mg_50C', 'deg_50C'] #dataframe creation using list of lists # loop for each unique sequence sub_data = [] for row_i in df.index: #loop for the legth of sequnece score (trian length is different from test) serie_i = df.loc[row_i] #panda series object seq_length = serie_i['seq_scored'] for seq_i in range (seq_length): seq_data = [serie_i['id'], serie_i['sequence'][seq_i], serie_i['structure'][seq_i], serie_i['predicted_loop_type'][seq_i], serie_i['reactivity_error'][seq_i], serie_i['deg_error_Mg_pH10'][seq_i], serie_i['deg_error_pH10'][seq_i], serie_i['deg_error_Mg_50C'][seq_i], serie_i['deg_error_50C'][seq_i], serie_i['reactivity'][seq_i], serie_i['deg_Mg_pH10'][seq_i], serie_i['deg_pH10'][seq_i], serie_i['deg_Mg_50C'][seq_i], serie_i['deg_50C'][seq_i]] sub_data.append(seq_data) sub_df = pd.DataFrame(sub_data, columns =col_names, dtype = float) return sub_df def count(x,colonne) : return (colonne==x).sum() def add_features(seq): # Coumpt the numerous of the group gr=1 seq['count_gr']=gr for i in range (1,seq.shape[0]): if ((seq.id.iloc[i]!=seq.id.iloc[i-1]) or (seq.base_predicted_loop_type.iloc[i]!=seq.base_predicted_loop_type.iloc[i-1])): gr=gr+1 seq.count_gr[i]=gr #Count the number of letters in the group 
seq['N']=seq['count_gr'].apply(lambda x: count(x,seq['count_gr'])) return seq def fianle_transform (dataframe): # filter with SN_filter criteria data_filter = dataframe[dataframe["SN_filter"] == 1] #use expend to change feature data_filter_seq = expend_feature(data_filter) #add feature data_filter_seq = add_features(data_filter_seq) #make feature onehot encoding one_hot_base = pd.get_dummies(data_filter_seq['base'],drop_first=True) #drop "A" one_hot_loop = pd.get_dummies(data_filter_seq['base_predicted_loop_type'],drop_first=True) # drop "B" one_hot_struct = pd.get_dummies(data_filter_seq['base_structure_type'],drop_first=True)# drop "(" #final dataset data_filter_seq = data_filter_seq.drop(["base", "base_predicted_loop_type","base_structure_type"], axis = 1) df = pd.concat([data_filter_seq,one_hot_base, one_hot_loop ,one_hot_struct], axis = 1) return df def MSE(vec1,vec2): Res = 0 n=len(vec1) for i in range (0,n): #looping through each element of the list diff_squared = (vec1[i]-vec2[i])**2 Res = Res + diff_squared #taking a sum of all the differences return Res/n #dividing summation by total values to obtain average def mcrmsc_yannick(y_true, y_pred): y_true = y_true.values rmse = [] for i in range (5): rmse.append(mean_squared_error(y_true[:,i], y_pred[:,i], squared = False)) mcrmsc = np.mean(rmse) return (mcrmsc, rmse) def ajout_N_predicted_loop(data): for i in letters_loop_type: col = np.where(data[i]==1, data['N'], 0) data ["N_"+i] = col col_B=np.where(data.N_E+data.N_H+data.N_I+data.N_M+data.N_S+data.N_X==0,data.N,0) data["N_B"]=col_B return data def MCRMSE(y_true, y_pred): """ Return loss between true and prediction, with mean column wise root mean squared error from sklearn.metrics import mean_squared_error Args: y_true: matrix y_pred: matrix Returns: output: double """ y_true = y_true.values n,Nt = y_pred.shape Res = 0 for i in range(0,Nt): Res = Res + mean_squared_error(y_true[:,i], y_pred[:,i], squared = False) return Res/ Nt class linear_mcrmse: """ Parameters ---------- X : `dataframe`, shape=(n_samples,n_features) features y : `dataframe`, shape=(n_samples, n_y) double lamb : `float`, value of the regularization parameter beta : `numpy.array`, shape=(n_features,n_y) weight matrix """ def __init__(self,X,y,lamb, n_ite = 10000, precision = 10^-4, beta = None): self.X_ = np.asanyarray(X) self.y_ = np.asanyarray(y) self.lamb_ = lamb self.n_samples_, self.n_features_ = X.shape self.n_y_ = y.shape[1] self.beta_ = np.random.random((self.n_features_, self.n_y_)) if (beta == None): self.beta_ = np.zeros((self.n_features_, self.n_y_)) else: self.beta_ = beta self.n_ite_ = n_ite self.precision_ = precision def loss(self): # compute mcrmsc loss y_pred = np.dot(self.X_, self.beta_) rmse = [] for i in range (self.n_y_): rmse.append(mean_squared_error(y_pred[:,i],self.y_[:,i], squared = False)) mcrmsc = np.mean(rmse) return (mcrmsc, rmse) def grad_loss(self): # the gradiant for mcrmsc gradiant rmse = self.loss()[1] grad = np.zeros((self.n_features_, self.n_y_)) y_pred = np.dot(self.X_, self.beta_) for j in range(self.n_y_): # loop over columns for i in range(self.n_features_): #loop over line grad_temp = 0 for x in range(self.n_y_): #loop over column grad_temp += self.X_[i, x]*(y_pred[i, x] - self.y_[i, x])/ rmse[x] / self.n_features_ grad[i, j] = grad_temp/ self.n_y_ return (grad) def fit (self): self.cost_ = [[100],] y_pred = np.dot(self.X_, self.beta_) cost = self.loss() self.cost_.append(cost) for _ in range(self.n_ite_): cost = self.loss()[0] if (cost > self.cost_[-2][0]): break else: 
gradient_vector = self.grad_loss() self.beta_ -= (self.lamb_)/self.n_features_ * gradient_vector cost = self.loss() self.cost_.append(cost) return self def predict(self, X_test): """ Predicts the value after the model has been trained. Parameters ---------- X_test : array-like, shape = [n_samples, n_features] Test samples Returns ------- Predicted value """ return np.dot(X_test, self.beta_)
python
#!/usr/bin/env python # Copyright (c) 2015, Carnegie Mellon University # All rights reserved. # Authors: David Butterworth <dbworth@cmu.edu> # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # - Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # - Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # - Neither the name of Carnegie Mellon University nor the names of its # contributors may be used to endorse or promote products derived from this # software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """ This is a demo of Rviz Tools for python which tests all of the available functions by publishing lots of Markers in Rviz. """ # Python includes import numpy import random # ROS includes import roslib import rospy from geometry_msgs.msg import Pose, Point, Quaternion, Vector3, Polygon from tf import transformations # rotation_matrix(), concatenate_matrices() import rviz_tools_py as rviz_tools # Initialize the ROS Node rospy.init_node('test', anonymous=False, log_level=rospy.INFO, disable_signals=False) # Define exit handler def cleanup_node(): print "Shutting down node" markers.deleteAllMarkers() rospy.on_shutdown(cleanup_node) markers = rviz_tools.RvizMarkers('/map', 'visualization_marker') while not rospy.is_shutdown(): # Axis: T = transformations.translation_matrix((1,-2,0)) scale = Vector3(1.0,0.2,0.2) # x=length, y=height, z=height markers.publishArrow(T, 'blue', scale, 5.0) # pose, color, scale, lifetime P = Pose(Point(0,0,1),Quaternion(1,0,-1,0)) arrow_length = 2.0 # single value for length (height is relative) markers.publishArrow(P, 'pink', arrow_length, 5.0) # pose, color, arrow_length, lifetime rospy.Rate(1).sleep() #1 Hz
python
# import os # import yaml # from click.testing import CliRunner # from mangum.cli.commands import init # def test_cli(tmpdir) -> None: # name = "test" # bucket_name = "my-bucket-1" # region_name = "ap-southeast-1" # runner = CliRunner() # config_dir = tmpdir.mkdir("tmp") # os.chdir(config_dir) # requirements_file_path = os.path.join(config_dir, "requirements.txt") # config_file_path = os.path.join(config_dir, "mangum.yml") # expected_config = { # "name": name, # "code_dir": "app", # "handler": "asgi.handler", # "bucket_name": bucket_name, # "region_name": region_name, # "timeout": 300, # } # result = runner.invoke(init, [name, bucket_name, region_name]) # with open(config_file_path, "r") as f: # assert f.read() == yaml.dump( # expected_config, default_flow_style=False, sort_keys=False # ) # with open(requirements_file_path, "r") as f: # assert f.read() == "mangum\n" # assert result.exit_code == 0 # def test_cli_no_optional_args(tmpdir) -> None: # name = "test" # runner = CliRunner() # config_dir = tmpdir.mkdir("tmp") # os.chdir(config_dir) # requirements_file_path = os.path.join(config_dir, "requirements.txt") # config_file_path = os.path.join(config_dir, "mangum.yml") # expected_config = { # "name": name, # "code_dir": "app", # "handler": "asgi.handler", # "bucket_name": None, # "region_name": None, # "timeout": 300, # } # result = runner.invoke(init, [name]) # with open(config_file_path, "r") as f: # assert f.read() == yaml.dump( # expected_config, default_flow_style=False, sort_keys=False # ) # with open(requirements_file_path, "r") as f: # assert f.read() == "mangum\n" # assert result.exit_code == 0
python
from importlib import import_module from importlib.machinery import SourceFileLoader from chainercmd.config.base import ConfigBase class Extension(ConfigBase): def __init__(self, **kwargs): required_keys = [] optional_keys = [ 'dump_graph', 'Evaluator', 'ExponentialShift', 'LinearShift', 'LogReport', 'observe_lr', 'observe_value', 'snapshot', 'PlotReport', 'PrintReport', ] super().__init__( required_keys, optional_keys, kwargs, self.__class__.__name__) class Custom(ConfigBase): def __init__(self, **kwargs): required_keys = [ 'file', 'name' ] optional_keys = [ 'args', ] super().__init__( required_keys, optional_keys, kwargs, self.__class__.__name__) def get_custum_extension_from_config(custom_extension_config): config = Custom(**custom_extension_config) loader = SourceFileLoader(config.name, config.file) mod = loader.load_module() if hasattr(config, 'args'): ext = getattr(mod, custom_extension_config['name'])(**config.args) else: ext = getattr(mod, custom_extension_config['name'])() return ext
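
# Added usage sketch (file, class name, and args below are illustrative, not part of the
# original project): loading a custom extension from a config dictionary with the helper above.
# custom_cfg = {
#     'file': 'my_extensions.py',        # path to a module defining the extension
#     'name': 'MyExtension',             # attribute to instantiate from that module
#     'args': {'trigger': (1, 'epoch')}  # forwarded to the extension's constructor
# }
# ext = get_custum_extension_from_config(custom_cfg)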
python
from copy import deepcopy from ..base import BaseAutoModel class BaseHeteroModelMaintainer(BaseAutoModel): def __init__(self, num_features, num_classes, device, dataset=None, **kwargs): super().__init__(num_features, num_classes, device, **kwargs) self._registered_parameters = {} if dataset is not None: self.from_dataset(dataset) def from_dataset(self, dataset): raise NotImplementedError # consider moving this to inner classes def register_parameter(self, key: str, value): self._registered_parameters[key] = value setattr(self, key, value) def destroy_parameter(self, key): if key in self._registered_parameters: return self._registered_parameters.pop(key) return None def from_hyper_parameter(self, hp, **kwargs): kw = deepcopy(self._kwargs) kw.update(kwargs) ret_self = self.__class__( self.input_dimension, self.output_dimension, self.device, **kw ) hp_now = dict(self.hyper_parameters) hp_now.update(hp) ret_self.hyper_parameters = hp_now for key, value in self._registered_parameters.items(): ret_self.register_parameter(key, value) ret_self.initialize() return ret_self
python
from django.conf import settings # IPStack Configuration # Use it like this: # GET '%scheck%s' % (IPSTACK_BASE_URL, IPSTACK_APIKEY) # notice the url param 'check' IPSTACK_BASE_URL = 'http://api.ipstack.com/' IPSTACK_APIKEY = '?access_key=%s' % settings.IPSTACK_APIKEY def get_ipstack_url(ip): """Return the ready-to-use ipstack api url.""" return '%s%s%s' % (IPSTACK_BASE_URL, ip, IPSTACK_APIKEY)
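
# Added usage sketch (illustrative IP; the access key comes from settings.IPSTACK_APIKEY):
# get_ipstack_url('134.201.250.155')
# -> 'http://api.ipstack.com/134.201.250.155?access_key=<IPSTACK_APIKEY>'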
python
import os import requests import sys import re from configs.config import Config from utils.vpn import connect import logging class hold_proxy(object): def __init__(self): self.proxy = os.environ.get("http_proxy") self.logger = logging.getLogger(__name__) def disable(self): os.environ["http_proxy"] = "" os.environ["HTTP_PROXY"] = "" os.environ["https_proxy"] = "" os.environ["HTTPS_PROXY"] = "" def enable(self): if self.proxy: os.environ["http_proxy"] = self.proxy os.environ["HTTP_PROXY"] = self.proxy os.environ["https_proxy"] = self.proxy os.environ["HTTPS_PROXY"] = self.proxy class proxy_env(object): def __init__(self, args): self.logger = logging.getLogger(__name__) self.args = args self.vpn = Config().vpn() self.session = requests.session() def Load(self): proxies = None proxy = {} aria2c_proxy = [] if self.args.proxy and self.vpn["proxies"]: proxies = self.vpn["proxies"] self.logger.info( "\nProxy Status: Activated Local Proxy (%s)", proxies) elif self.args.privtvpn: self.logger.info("\nProxy Status: Activated Private VPN") proxy.update({"port": self.vpn["private"]["port"]}) proxy.update({"user": self.vpn["private"]["email"]}) proxy.update({"pass": self.vpn["private"]["passwd"]}) if "pvdata.host" in self.args.privtvpn: proxy.update({"host": self.args.privtvpn}) else: proxy.update( {"host": connect(code=self.args.privtvpn).privateVPN()} ) proxies = self.vpn["private"]["http"].format( email=proxy["user"], passwd=proxy["pass"], ip=proxy["host"], port=proxy["port"], ) elif self.args.nordvpn: proxy.update({"port": self.vpn["nordvpn"]["port"]}) proxy.update({"user": self.vpn["nordvpn"]["username"]}) proxy.update({"pass": self.vpn["nordvpn"]["password"]}) host = '' if "nordvpn.com" in self.args.nordvpn: host = self.args.nordvpn elif re.search(r'[a-z]{2}\d+', self.args.nordvpn): # configured server id host = f"{self.args.nordvpn}.nordvpn.com" else: host = connect(code=self.args.nordvpn).get_nordvpn_server() proxy.update({"host": host}) self.logger.info( "\nProxy Status: Activated NordVPN (%s)", host.split('.')[0][:2].upper()) proxies = self.vpn["nordvpn"]["http"].format( email=proxy["user"], passwd=proxy["pass"], ip=proxy["host"], port=proxy["port"], ) if proxy.get("host"): aria2c_proxy.append( "--https-proxy={}:{}".format(proxy.get("host"), proxy.get("port")) ) if proxy.get("user"): aria2c_proxy.append( "--https-proxy-user={}".format(proxy.get("user"))) if proxy.get("pass"): aria2c_proxy.append( "--https-proxy-passwd={}".format(proxy.get("pass"))) ip_info = self.verify_proxy(proxies) return ip_info def verify_proxy(self, proxy): if proxy: scheme = ('http', 'https')['https' in proxy] proxies = {scheme: proxy} self.session.proxies = proxies res = self.session.get('https://ipinfo.io/json', timeout=5) if res.ok: ip_info = res.json() if proxy: ip_info.update({"proxy": proxies}) else: ip_info.update({"proxy": ''}) self.logger.info('ip: %s (%s)', ip_info['ip'], ip_info['country']) return ip_info else: self.logger.error(res.text)
python
from rest_framework import generics, authentication, permissions from rest_framework import status from django.http.response import HttpResponse from django.contrib.auth import authenticate, login from rest_framework_jwt.settings import api_settings from mentorbot.serializers.mentordetailsserializers import MentorProfileSerializer, MentorUserSerializer, TokenSerializer from .models import MentorProfile, MentorUser from mentorbot.settings import base from rest_framework import generics from rest_framework import filters jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER class MentorDetailsCreateUser(generics.CreateAPIView): '''creates the user''' queryset = MentorUser.objects.all() serializer_class = MentorUserSerializer permission_classes = (permissions.AllowAny,) def post(self, request, *args, **kwargs): password = request.data.get("password", "") email = request.data.get("email", "") if not password and not email: return HttpResponse( "password and email is required to register a user", status=status.HTTP_400_BAD_REQUEST ) else: MentorUser.objects.create_user(password=password, email=email) return HttpResponse("User created succesfully", status=status.HTTP_201_CREATED) class MentorDetailsListUsers(generics.ListAPIView): """Return a list of all users.""" authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAdminUser,) queryset = MentorUser.objects.all() serializer_class = MentorUserSerializer class MentorDetailsListUser(generics.ListAPIView): """Return a list of one users.""" queryset = MentorUser.objects.all() serializer_class = MentorProfileSerializer class MentorDetailsUpdateUser(generics.UpdateAPIView): '''Updates user details''' queryset = MentorUser.objects.all() serializer_class = MentorProfileSerializer permission_classes = (permissions.IsAuthenticated,) class MentorDestroyUserNoProfile(generics.DestroyAPIView): '''Deletes User when profile has not been saved succesfully''' queryset = MentorUser.objects.all() serializer_class = MentorProfileSerializer class MentorDetailsDestroyUser(generics.DestroyAPIView): '''Deletes User''' queryset = MentorUser.objects.all() serializer_class = MentorProfileSerializer permission_classes = (permissions.IsAuthenticated,) """|------------------------------------------------------|""" # class MentorProfileCreate(generics.CreateAPIView): # queryset = MentorProfile.objects.all() # serializer_class = MentorProfileSerializer class MentorProfileListUsers(generics.ListAPIView): queryset = MentorProfile.objects.all() serializer_class = MentorProfileSerializer permission_classes = (permissions.IsAuthenticated,) class MentorProfileListUser(generics.ListAPIView): '''returns one profile''' queryset = MentorProfile.objects.all() serializer_class = MentorProfileSerializer class FieldListView(generics.ListAPIView): queryset = MentorProfile.objects.all() serializer_class = MentorProfileSerializer filter_backends = (filters.SearchFilter,) search_fields = ('field_name') class MentorProfileUpdate(generics.UpdateAPIView): '''update one profile''' queryset = MentorProfile.objects.all() serializer_class = MentorProfileSerializer permission_classes = (permissions.IsAuthenticated,) class MentorProfileDestroy(generics.DestroyAPIView): '''destroy one profile''' queryset = MentorProfile.objects.all() serializer_class = MentorProfileSerializer permission_classes = (permissions.IsAuthenticated,) """----------------------------------------------------------""" class 
LoginView(generics.CreateAPIView): """ POST auth/login/ """ # This permission class will overide the global permission # class setting permission_classes = (permissions.AllowAny,) queryset = MentorUser.objects.all() def post(self, request, *args, **kwargs): email = request.data.get("email", "") password = request.data.get("password", "") user = authenticate(request, email=email, password=password) if user is not None: login(request, user) serializer = TokenSerializer(data={ # using drf jwt utility functions to generate a token "token": jwt_encode_handler( jwt_payload_handler(user) )}) serializer.is_valid() return HttpResponse(serializer.data, status.HTTP_200_OK) return HttpResponse(data={ "message": "User does not exist, please enter your credentials again" }, status=status.HTTP_401_UNAUTHORIZED) class LogoutView(generics.CreateAPIView): queryset = MentorUser.objects.all() def get(self, request, format=None): request.MentorUser.auth_token.delete() return HttpResponse(status=status.HTTP_200_OK)
python
import math import keras from keras import optimizers from keras import regularizers from keras.callbacks import LearningRateScheduler, TensorBoard, ModelCheckpoint from keras.datasets import cifar10 from keras.initializers import he_normal from keras.layers import Conv2D, Dense, Input, add, Activation, GlobalAveragePooling2D, multiply, Reshape from keras.layers import Lambda, concatenate from keras.layers.normalization import BatchNormalization from keras.models import Model from keras.preprocessing.image import ImageDataGenerator cardinality = 4 # 4 or 8 or 16 or 32 base_width = 64 inplanes = 64 expansion = 4 img_rows, img_cols = 32, 32 img_channels = 3 num_classes = 10 batch_size = 64 # 120 iterations = 781 # 416 # total data / iterations = batch size epochs = 300 weight_decay = 0.0005 mean = [125.307, 122.95, 113.865] std = [62.9932, 62.0887, 66.7048] from keras import backend as K if ('tensorflow' == K.backend()): import tensorflow as tf config = tf.ConfigProto() config.gpu_options.allow_growth = True sess = tf.Session(config=config) def scheduler(epoch): if epoch < 150: return 0.1 if epoch < 225: return 0.01 return 0.001 def resnext(img_input, classes_num): global inplanes def add_common_layer(x): x = BatchNormalization(momentum=0.9, epsilon=1e-5)(x) x = Activation('relu')(x) return x def group_conv(x, planes, stride): h = planes // cardinality groups = [] for i in range(cardinality): group = Lambda(lambda z: z[:, :, :, i * h: i * h + h])(x) groups.append(Conv2D(h, kernel_size=(3, 3), strides=stride, kernel_initializer=he_normal(), kernel_regularizer=regularizers.l2(weight_decay), padding='same', use_bias=False)( group)) x = concatenate(groups) return x def residual_block(x, planes, stride=(1, 1)): D = int(math.floor(planes * (base_width / 64.0))) C = cardinality shortcut = x y = Conv2D(D * C, kernel_size=(1, 1), strides=(1, 1), padding='same', kernel_initializer=he_normal(), kernel_regularizer=regularizers.l2(weight_decay), use_bias=False)(shortcut) y = add_common_layer(y) y = group_conv(y, D * C, stride) y = add_common_layer(y) y = Conv2D(planes * expansion, kernel_size=(1, 1), strides=(1, 1), padding='same', kernel_initializer=he_normal(), kernel_regularizer=regularizers.l2(weight_decay), use_bias=False)(y) y = add_common_layer(y) if stride != (1, 1) or inplanes != planes * expansion: shortcut = Conv2D(planes * expansion, kernel_size=(1, 1), strides=stride, padding='same', kernel_initializer=he_normal(), kernel_regularizer=regularizers.l2(weight_decay), use_bias=False)(x) shortcut = BatchNormalization(momentum=0.9, epsilon=1e-5)(shortcut) y = squeeze_excite_block(y) y = add([y, shortcut]) y = Activation('relu')(y) return y def residual_layer(x, blocks, planes, stride=(1, 1)): x = residual_block(x, planes, stride) inplanes = planes * expansion for i in range(1, blocks): x = residual_block(x, planes) return x def squeeze_excite_block(input, ratio=16): init = input channel_axis = 1 if K.image_data_format() == "channels_first" else -1 # compute channel axis filters = init._keras_shape[channel_axis] # infer input number of filters se_shape = (1, 1, filters) if K.image_data_format() == 'channels_last' else ( filters, 1, 1) # determine Dense matrix shape se = GlobalAveragePooling2D()(init) se = Reshape(se_shape)(se) se = Dense(filters // ratio, activation='relu', kernel_initializer='he_normal', kernel_regularizer=regularizers.l2(weight_decay), use_bias=False)(se) se = Dense(filters, activation='sigmoid', kernel_initializer='he_normal', kernel_regularizer=regularizers.l2(weight_decay), 
use_bias=False)(se) x = multiply([init, se]) return x def conv3x3(x, filters): x = Conv2D(filters=filters, kernel_size=(3, 3), strides=(1, 1), padding='same', kernel_initializer=he_normal(), kernel_regularizer=regularizers.l2(weight_decay), use_bias=False)(x) return add_common_layer(x) def dense_layer(x): return Dense(classes_num, activation='softmax', kernel_initializer=he_normal(), kernel_regularizer=regularizers.l2(weight_decay))(x) # build the resnext model x = conv3x3(img_input, 64) x = residual_layer(x, 3, 64) x = residual_layer(x, 3, 128, stride=(2, 2)) x = residual_layer(x, 3, 256, stride=(2, 2)) x = GlobalAveragePooling2D()(x) x = dense_layer(x) return x if __name__ == '__main__': # load data (x_train, y_train), (x_test, y_test) = cifar10.load_data() y_train = keras.utils.to_categorical(y_train, num_classes) y_test = keras.utils.to_categorical(y_test, num_classes) x_train = x_train.astype('float32') x_test = x_test.astype('float32') # - mean / std for i in range(3): x_train[:, :, :, i] = (x_train[:, :, :, i] - mean[i]) / std[i] x_test[:, :, :, i] = (x_test[:, :, :, i] - mean[i]) / std[i] # build network img_input = Input(shape=(img_rows, img_cols, img_channels)) output = resnext(img_input, num_classes) senet = Model(img_input, output) print(senet.summary()) # load weight # senet.load_weights('senet.h5') # set optimizer sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True) senet.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy']) # set callback tb_cb = TensorBoard(log_dir='./senet/', histogram_freq=0) # tensorboard log change_lr = LearningRateScheduler(scheduler) # learning rate scheduler ckpt = ModelCheckpoint('./ckpt_senet.h5', save_best_only=False, mode='auto', period=10) # checkpoint cbks = [change_lr, tb_cb, ckpt] # set data augmentation print('Using real-time data augmentation.') datagen = ImageDataGenerator(horizontal_flip=True, width_shift_range=0.125, height_shift_range=0.125, fill_mode='constant', cval=0.) datagen.fit(x_train) # start training senet.fit_generator(datagen.flow(x_train, y_train, batch_size=batch_size), steps_per_epoch=iterations, epochs=epochs, callbacks=cbks, validation_data=(x_test, y_test)) senet.save('senet.h5')
python
import os from copy import deepcopy from .base import BoundaryCondition from .base import BCFile from inspect import cleandoc default_value = 0.0064879 field_template = cleandoc(""" /*--------------------------------*- C++ -*----------------------------------*\ ========= | \\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / O peration | Website: https://openfoam.org \\ / A nd | Version: 9 \\/ M anipulation | \*---------------------------------------------------------------------------*/ FoamFile { format ascii; class volScalarField; location "0/shell"; object epsilon; } // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // dimensions [ 0 2 -3 0 0 0 0 ]; internalField <internal_field_value>; boundaryField { #includeEtc "caseDicts/setConstraintTypes" <patches> } // ************************************************************************* // """) class Epsilon(BCFile): default_value = default_value field_template = field_template type = 'epsilon' default_entry = cleandoc(""" ".*" { type epsilonWallFunction; value $internalField; } """) class EpsilonWallFunction(BoundaryCondition): template = cleandoc(""" { type epsilonWallFunction; value <value>; } """) def __init__(self, *args, **kwargs): BoundaryCondition.__init__(self, *args, **kwargs) self.value = kwargs.get('value', 1e-6) self.object = 'epsilon' def generate_dict_entry(self, *args, **kwargs): template = deepcopy(self.template) template = template.replace('<value>', str(self.value)) return template class TurbulentMixingLengthDissipationRateInlet(BoundaryCondition): template = cleandoc(""" { type turbulentMixingLengthDissipationRateInlet; mixingLength <mixing_length>; value <value>; } """) def __init__(self, *args, **kwargs): """ This boundary condition provides an inlet condition for turbulent kinetic energy dissipation rate, i.e. \c epsilon, based on a specified mixing length. The patch values are calculated using: \f[ \epsilon_p = \frac{C_{\mu}^{0.75} k^{1.5}}{L} \f] where \epsilon_p | Patch epsilon values [m2/s3] C_\mu | Empirical model constant retrived from turbulence model k | Turbulent kinetic energy [m2/s2] L | Mixing length scale [m] https://www.openfoam.com/documentation/guides/latest/api/turbulentMixingLengthDissipationRateInletFvPatchScalarField_8H_source.html :param args: :param kwargs: """ BoundaryCondition.__init__(self, *args, **kwargs) self.mixing_length = kwargs.get('mixing_length', 1e-6) self.value = kwargs.get('value', 1e-6) self.object = 'epsilon' def generate_dict_entry(self, *args, **kwargs): template = deepcopy(self.template) template = template.replace('<value>', str(self.value)) template = template.replace('<mixing_length>', str(self.mixing_length)) return template
python
####################  Importing Requirements ####################
import spacy
import pandas as pd
import warnings
warnings.filterwarnings('ignore')
nlp = spacy.load("ur_model")    # Make sure to download and install the model from https://github.com/mirfan899/Urdu

##################  Longest Common Subsequence ##################
def lcs(X, Y, m, n):
    """
    Recursive implementation for finding the LCS between 2 sentences
    X: Tokenized Sentence 1
    Y: Tokenized Sentence 2
    m: length of X
    n: length of Y
    """
    if m == 0 or n == 0:    # To deal with any redundant new lines
        return 0
    elif X[m-1].similarity(Y[n-1]) == 1:    # If the cosine similarity between two tokens is 1, they are the same.
        return 1 + lcs(X, Y, m-1, n-1)
    else:
        return max(lcs(X, Y, m, n-1), lcs(X, Y, m-1, n))


text = open("./data.txt", encoding="utf8").read()   # Reading raw text
sentences = text.split("\n")    # Extracting sentences from the raw text

#########  Word Tokenization using SpaCy ##########
docs = {}
for i in range(0, len(sentences)):
    docs[i] = nlp(sentences[i])

################## Calculating the LCS between sentences and storing it in a 2D list #####################
arr2D = [[0 for col in range(len(sentences))] for row in range(len(sentences))]    # Initializing list of lists

for row in range(0, len(sentences)):
    for column in range(0, len(sentences)):
        arr2D[row][column] = lcs(docs[row], docs[column], len(docs[row]), len(docs[column]))

########################## Converting the list of lists into a pandas dataframe ################################
df = pd.DataFrame.from_records(arr2D)
print("\n", "The Longest Common Subsequences between sentences (ZERO INDEXED) are:", "\n")
print(df)
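
# Added sketch (not in the original script): the recursive lcs() above recomputes the same
# subproblems many times and becomes very slow for longer sentences. An equivalent
# dynamic-programming version, using the same token-similarity test, runs in O(m*n):
def lcs_dp(X, Y):
    m, n = len(X), len(Y)
    table = [[0] * (n + 1) for _ in range(m + 1)]
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            if X[i - 1].similarity(Y[j - 1]) == 1:
                table[i][j] = table[i - 1][j - 1] + 1
            else:
                table[i][j] = max(table[i - 1][j], table[i][j - 1])
    return table[m][n]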
python
#!/usr/bin/env python # -*- coding: utf-8 -*- # # This file is part of the pyqualtrics package. # For copyright and licensing information about this package, see the # NOTICE.txt and LICENSE.txt files in its top-level directory; they are # available at https://github.com/Baguage/pyqualtrics # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Use setuptools without bundling it # https://pythonhosted.org/setuptools/setuptools.html#using-setuptools-without-bundling-it # Note this means user will need network connection when running setup.py # import ez_setup # ez_setup.use_setuptools(version="18.2") from setuptools import setup, find_packages setup( name="pyqualtrics", version="0.6.7", author="Alex Vyushkov", author_email="pyqualtrics@gmail.com", description="Unofficial python SDK for Qualtrics API", license="Apache License 2.0", keywords="API Qualtrics Survey SDK Social Science Psychology", url="https://github.com/Baguage/pyqualtrics", # find_packages() takes a source directory and two lists of package name patterns to exclude and include. # If omitted, the source directory defaults to the same directory as the setup script. packages=find_packages(exclude=["examples"]), # https://pythonhosted.org/setuptools/setuptools.html#using-find-packages install_requires=["requests"], scripts=['bin/qualtrics.cmd', 'bin/qualtrics'], package_data = { # If any package contains *.qsf or *.rst files, include them: '': ['*.qsf', '*.rst'], }, test_suite="tests", classifiers=[ "Development Status :: 4 - Beta", "License :: OSI Approved :: Apache Software License", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Topic :: Software Development :: Libraries :: Python Modules", ], )
python
import unittest class TestBaseStegoImage(unittest.TestCase): def test__pack_pixels(self): self.fail() def test__insert_data(self): self.fail() def test__extract_data(self): self.fail() if __name__ == '__main__': unittest.main()
python
import textwrap import requests import jwt import enum from cryptography.x509 import load_pem_x509_certificate from cryptography.hazmat.backends import default_backend TIMEOUT = 2 # timeout for all HTTP requests class Errors(enum.Enum): MetadataUrlUnreachable = "Unable to reach metadata URL." MetadataUrlHttpNok = "Response from metadata URL is not ok (200ish)." JWKsURIFormat = "Unable to obtain jwks_uri from metadata URL." TokenEndpoint = "Unable to obtain token endpoint from metadata URL." ProxyValues = "Invalid proxy values provided." UnableObtainToken = "Unable to obtain OAuth token." InvalidToken = "Invalid input token." TokenMissingKID = "Token header missing key id." UnableObtainKeys = "Unable to obtain public keys from Azure." PublicKey = "Error while obtaining public certificate for key id." InvalidJwt = "Token validation error." class OAuth(): """ An OAuth class for Azure. """ def __init__(self, tenant_id, proxy=None, load_uris=True): """Initializes an object for this class. Args: tenant_id (str): Azure tennant id. proxy (str, optional): a proxy connection if you don't have direct internet access. Defaults to None. E.g.: "http://myproxy:8000" load_uris (bool, optional): load URIs for JWKS and token endpoint. Defaults to True. default_metadata (bool, optional): Used for unit testing. Defaults to True. Raises: SystemError: Unable to obtain metadata from URL. KeyError: Unable to obtain value from metadata dictionary. ValueError: Invalid values provided to class initializer. """ if proxy is not None: self.proxies = { "http": proxy, "https": proxy } else: self.proxies = None self.tenant_id = tenant_id ms_base = "https://login.microsoftonline.com" self.metadata_url = f"{ms_base}/{tenant_id}/v2.0/.well-known"\ "/openid-configuration" # Set later to facilitate unit testing if load_uris: self.load_uris() else: self.jwks_uri = None self.token_endpoint = None def load_uris(self): try: metadata = requests.get( self.metadata_url, proxies=self.proxies, timeout=TIMEOUT) if metadata.ok: metadata = metadata.json() else: resp = metadata.status_code print(f"Status code from metadata URL: {resp}") raise SystemError(Errors.MetadataUrlHttpNok.value) except Exception as e: err = "{} Reason: {}".format( Errors.MetadataUrlUnreachable.value, str(e)) print(err) raise SystemError(Errors.MetadataUrlUnreachable.value) self.jwks_uri = metadata.get('jwks_uri', None) if self.jwks_uri is None: raise KeyError(Errors.JWKsURIFormat.value) self.token_endpoint = metadata.get('token_endpoint', None) if self.token_endpoint is None: raise KeyError(Errors.TokenEndpoint.value) def get_token(self, client_id, client_secret, scope): """Returns JWT for a given AzureAD scope or an error message if that was not possible. Args: client_id (str): the id of your application (calling app id) client_secret (str): the client secret of your application scope (str): scope you want to call in Azure. E.g.: api://342ba2-5342-af43/.default Returns: (str, str): a JWT and error strings. One of them will be None. 
""" header = { "content-type": "application/x-www-form-urlencoded" } body = { "client_id": client_id, "client_secret": client_secret, "scope": scope, "grant_type": "client_credentials", } try: response = requests.post(url=self.token_endpoint, headers=header, proxies=self.proxies, data=body) if not response.ok: error = f"{Errors.UnableObtainToken.value} " \ f"Detail: {response.text}" return None, error except Exception as e: return None, str(e) token = response.json().get("access_token", None) if token is None: return None, Errors.UnableObtainToken.value # It all worked if you got here! return token, None def get_claims(self, token, app_id): """Returns the claims for the input token, given it has been issued for the given resource and that it is valid. Args: token (str): a Json Web Token (JWT) app_id (str): the application id in Azure to which the JWT was issued. Returns: dict, str: the claims for the given token in case it is valid for your application OR an error string in case it is not. """ if not isinstance(token, str): return (None, Errors.InvalidToken.value) # Parse token parts = token.split('.') if len(parts) != 3: return (None, Errors.InvalidToken.value) (header, payload, signature) = parts # Retrieve key id from JWT header header = jwt.get_unverified_header(token) kid = header.get('kid', None) if kid is None: return (None, Errors.TokenMissingKID.value) # Obtain x509 public key used to generate token. public_certificate, err = self._get_x509(kid) if err is not None: return None, err # Verify signature try: claims = jwt.decode( token, public_certificate, audience=[app_id, f"api://{app_id}"], algorithms=["RS256"]) return claims, None except Exception as e: error = f"{Errors.InvalidJwt.value} Details:{str(e)}" return None, error def _get_x509(self, kid): """Obtains public certificate used by the IdP with the given key id Args: kid (str): key id Returns: x509certificate, str: the public certificate used with the provided kid and the error string """ try: response = requests.get(url=self.jwks_uri, proxies=self.proxies) if not response.ok: return None, Errors.UnableObtainKeys.value keys = response.json() keys = keys.get("keys", None) if keys is None: return None, Errors.UnableObtainKeys.value except Exception as e: error = f"{Errors.UnableObtainKeys.value} Detail: {str(e)}" return None, error # Verify which key from Azure matches the key id in the input token for key in keys: kid_from_azure = key.get("kid", None) if kid == kid_from_azure: # Now get the public certificate that follows this key id public_cert = key.get("x5c", None) if public_cert is None: return None, Errors.PublicKey.value public_cert = public_cert[0] # Generate certificate format from certificate string certificate = '-----BEGIN CERTIFICATE-----\n' certificate += '\n'.join(textwrap.wrap(public_cert, 64)) certificate += '\n'+'-----END CERTIFICATE-----\n' cert_obj = load_pem_x509_certificate(certificate.encode(), default_backend()) return cert_obj.public_key(), None return None, Errors.PublicKey.value
python
'''
This program tests simple operations (addition, multiplication)
on constant and matrix tensors (matmul)
'''
import tensorflow as tf
tf.enable_eager_execution()

a = tf.constant(1)
b = tf.constant(1)
c = tf.add(a, b)  # equivalent of a + b
print(c)

A = tf.constant([[1, 2], [3, 4]])
B = tf.constant([[5, 6], [7, 8]])
C = tf.matmul(A, B)
print(C)
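
# Expected results (worked by hand from the constants above):
# c == 2
# C == [[1*5 + 2*7, 1*6 + 2*8], [3*5 + 4*7, 3*6 + 4*8]] == [[19, 22], [43, 50]]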
python
ENTRY_POINT = 'vowels_count' #[PROMPT] FIX = """ Add more test cases. """ def vowels_count(s): """Write a function vowels_count which takes a string representing a word as input and returns the number of vowels in the string. Vowels in this case are 'a', 'e', 'i', 'o', 'u'. Here, 'y' is also a vowel, but only when it is at the end of the given word. Example: >>> vowels_count("abcde") 2 >>> vowels_count("ACEDY") 3 """ #[SOLUTION] vowels = "aeiouAEIOU" n_vowels = sum(c in vowels for c in s) if s[-1] == 'y' or s[-1] == 'Y': n_vowels += 1 return n_vowels #[CHECK] def check(candidate): # Check some simple cases assert candidate("abcde") == 2, "Test 1" assert candidate("Alone") == 3, "Test 2" assert candidate("key") == 2, "Test 3" assert candidate("bye") == 1, "Test 4" assert candidate("keY") == 2, "Test 5" assert candidate("bYe") == 1, "Test 6" assert candidate("ACEDY") == 3, "Test 7" # Check some edge cases that are easy to work out by hand. assert True, "This prints if this assert fails 2 (also good for debugging!)"
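    # Added edge cases (consistent with the reference solution above: 'y'/'Y' only counts at the end).
    assert candidate("sky") == 1, "Test 8"
    assert candidate("xyz") == 0, "Test 9"
    assert candidate("AEIOUY") == 6, "Test 10"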
python
import numpy as np import time feature_dict = {} for i in range(190190): if i % 1001 == 1 : t1 = time.time() class_video_name = np.random.randint(190) np_as_line = np.random.rand(4014) if class_video_name in feature_dict.keys(): feature_dict[class_video_name] = np.concatenate( (feature_dict[class_video_name], np.expand_dims(np_as_line, axis=0))) else: feature_dict[class_video_name] = np.expand_dims(np_as_line, axis=0) if i % 1001 ==0 and i !=0: print((time.time()-t1)/1001) print('wow')
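
# Added note (not in the original): np.concatenate copies the accumulated array on every
# append, which is what makes this loop slow (quadratic in the number of rows). Collecting
# rows in plain lists and stacking once at the end is usually much faster, e.g.:
# rows = {}
# rows.setdefault(class_video_name, []).append(np_as_line)   # inside the loop
# feature_dict = {k: np.vstack(v) for k, v in rows.items()}  # once, after the loop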
python
# -*- coding: utf-8 -*- # Copyright (c) 2018-2022, libracore (https://www.libracore.com) and contributors # For license information, please see license.txt from __future__ import unicode_literals import frappe from frappe.model.document import Document from datetime import datetime import json from mvd.mvd.doctype.druckvorlage.druckvorlage import get_druckvorlagen from frappe.utils.data import today from frappe.utils.background_jobs import enqueue from frappe import _ class Mahnung(Document): # this will apply all payment reminder levels in the sales invoices def update_reminder_levels(self): for invoice in self.sales_invoices: sales_invoice = frappe.get_doc("Sales Invoice", invoice.sales_invoice) sales_invoice.payment_reminder_level = invoice.reminder_level sales_invoice.save(ignore_permissions=True) return def reset_reminder_levels(self): for invoice in self.sales_invoices: sales_invoice = frappe.get_doc("Sales Invoice", invoice.sales_invoice) sales_invoice.payment_reminder_level = int(invoice.reminder_level) - 1 sales_invoice.save(ignore_permissions=True) return # apply payment reminder levels on submit (server based) def on_submit(self): self.update_reminder_levels() def on_cancel(self): self.reset_reminder_levels() pass # this function will create new payment reminders @frappe.whitelist() def create_payment_reminders(sektion_id): args = { 'sektion_id': sektion_id } enqueue("mvd.mvd.doctype.mahnung.mahnung.bulk_create_payment_reminders", queue='long', job_name='{0} Mahnlauf'.format(sektion_id), timeout=5000, **args) return def bulk_create_payment_reminders(sektion_id): # get company company = frappe.get_doc("Sektion", sektion_id).company # get all customers with open sales invoices sql_query = ("""SELECT `customer` FROM `tabSales Invoice` WHERE `outstanding_amount` > 0 AND `docstatus` = 1 AND (`due_date` < CURDATE()) AND ((`exclude_from_payment_reminder_until` IS NULL) OR (`exclude_from_payment_reminder_until` < CURDATE())) AND `company` = "{company}" GROUP BY `customer`;""".format(company=company)) customers = frappe.db.sql(sql_query, as_dict=True) # get all sales invoices that are overdue if len(customers) > 0: max_level = 3 for customer in customers: sql_query = ("""SELECT `name`, `due_date`, `posting_date`, `payment_reminder_level`, `grand_total`, `outstanding_amount` , `currency`, `mv_mitgliedschaft` FROM `tabSales Invoice` WHERE `outstanding_amount` > 0 AND `customer` = '{customer}' AND `docstatus` = 1 AND (`due_date` < CURDATE()) AND `company` = "{company}" AND ((`exclude_from_payment_reminder_until` IS NULL) OR (`exclude_from_payment_reminder_until` < CURDATE())); """.format(customer=customer.customer, company=company)) open_invoices = frappe.db.sql(sql_query, as_dict=True) if open_invoices: now = datetime.now() invoices = [] mitgliedschaften = [] highest_level = 0 total_before_charges = 0 currency = None for invoice in open_invoices: level = invoice.payment_reminder_level + 1 if level > max_level: level = max_level new_invoice = { 'sales_invoice': invoice.name, 'amount': invoice.grand_total, 'outstanding_amount': invoice.outstanding_amount, 'posting_date': invoice.posting_date, 'due_date': invoice.due_date, 'reminder_level': level, 'ist_mitgliedschaftsrechnung': invoice.ist_mitgliedschaftsrechnung, 'mitgliedschafts_jahr': invoice.mitgliedschafts_jahr } if level > highest_level: highest_level = level total_before_charges += invoice.outstanding_amount invoices.append(new_invoice) currency = invoice.currency if invoice.mv_mitgliedschaft: mitgliedschaften.append({ 
'mv_mitgliedschaft': invoice.mv_mitgliedschaft }) # find reminder charge charge_matches = frappe.get_all("ERPNextSwiss Settings Payment Reminder Charge", filters={ 'reminder_level': highest_level }, fields=['reminder_charge']) reminder_charge = 0 if charge_matches: reminder_charge = charge_matches[0]['reminder_charge'] druckvorlage = get_default_druckvorlage(sektion_id, frappe.get_value("Mitgliedschaft", mitgliedschaften[0]['mv_mitgliedschaft'], "language")) new_reminder = frappe.get_doc({ "doctype": "Mahnung", "sektion_id": sektion_id, "customer": customer.customer, "mitgliedschaften": mitgliedschaften, "hidden_linking": mitgliedschaften, "date": "{year:04d}-{month:02d}-{day:02d}".format( year=now.year, month=now.month, day=now.day), "title": "{customer} {year:04d}-{month:02d}-{day:02d}".format( customer=customer.customer, year=now.year, month=now.month, day=now.day), "sales_invoices": invoices, 'highest_level': highest_level, 'total_before_charge': total_before_charges, 'reminder_charge': reminder_charge, 'total_with_charge': (total_before_charges + reminder_charge), 'company': company, 'currency': currency, 'druckvorlage': druckvorlage, 'status_c': frappe.get_value("Mitgliedschaft", mitgliedschaften[0]['mv_mitgliedschaft'], "status_c") }) reminder_record = new_reminder.insert(ignore_permissions=True) frappe.db.commit() return 'Mahnungen wurden erstellt' else: return 'Keine Rechnungen zum Mahnen vorhanden' def get_default_druckvorlage(sektion, language): druckvorlage = frappe.get_list('Druckvorlage', fields='name', filters={'dokument': 'Mahnung', 'sektion_id': sektion, 'language': language or 'de', 'default': 1}, limit=1, ignore_ifnull=True) return druckvorlage[0].name def get_mahnungs_qrrs(mahnung): mahnung = frappe.get_doc("Mahnung", mahnung) sektion = frappe.get_doc("Sektion", mahnung.sektion_id) bankkonto = frappe.get_doc("Account", sektion.account) druckvorlage = frappe.get_doc("Druckvorlage", mahnung.druckvorlage) qrrs = [] for _sinv in mahnung.sales_invoices: sinv = frappe.get_doc("Sales Invoice", _sinv.sales_invoice) # receiver if sinv.company_address: cmp_addr = frappe.get_doc("Address", sinv.company_address) if cmp_addr: address_array = cmp_addr.address_line1.split(" ") address_line_item_count = len(address_array) cmp_country = frappe.get_doc("Country", cmp_addr.country) cmp_country_code = str(cmp_country.code).upper() cmp_address_line_detail = {'name': sinv.company, 'street': '', 'number': '', 'plz': cmp_addr.plz, 'city': cmp_addr.city, 'country': cmp_country_code } for i in range(0, (address_line_item_count - 1)): cmp_address_line_detail['street'] = cmp_address_line_detail['street'] + " " + address_array[i] cmp_address_line_detail['number'] = address_array[address_line_item_count - 1] receiver_name = cmp_address_line_detail['name'] receiver_street = cmp_address_line_detail['street'] receiver_number = cmp_address_line_detail['number'] receiver_pincode = cmp_address_line_detail['plz'] receiver_town = cmp_address_line_detail['city'] receiver_country = cmp_address_line_detail['country'] if cmp_addr.postfach: if cmp_addr.postfach_nummer: receiver_street = 'Postfach' receiver_number = cmp_addr.postfach_nummer else: receiver_street = 'Postfach' receiver_number = ' ' else: receiver_name = False receiver_street = False receiver_number = False receiver_pincode = False receiver_town = False receiver_country = False # payer if sinv.customer_address: pay_addr = frappe.get_doc("Address", sinv.customer_address) if pay_addr: if pay_addr.postfach: pay_country = frappe.get_doc("Country", 
pay_addr.country) pay_country_code = str(pay_country.code).upper() if pay_addr.postfach_nummer: postfach_nummer = pay_addr.postfach_nummer else: postfach_nummer = ' ' pay_address_line_detail = {'name': sinv.customer, 'street': 'Postfach', 'number': postfach_nummer, 'pin': pay_addr.pincode, 'city': pay_addr.city, 'country': pay_country_code } else: pay_address_trimed = str(pay_addr.address_line1).strip() pay_address_array = pay_address_trimed.split(" ") pay_address_line_item_count = len(pay_address_array) pay_country = frappe.get_doc("Country", pay_addr.country) pay_country_code = str(pay_country.code).upper() pay_address_line_detail = {'name': sinv.customer, 'street': '', 'number': '', 'pin': pay_addr.pincode, 'city': pay_addr.city, 'country': pay_country_code } for i in range(0, (pay_address_line_item_count - 1)): pay_address_line_detail['street'] = pay_address_line_detail['street'] + " " + pay_address_array[i] pay_address_line_detail['number'] = pay_address_array[pay_address_line_item_count - 1] payer_name = sinv.customer_name payer_street = pay_address_line_detail['street'] payer_number = pay_address_line_detail['number'] payer_pincode = pay_address_line_detail['pin'] payer_town = pay_address_line_detail['city'] payer_country = pay_address_line_detail['country'] if not payer_street: if payer_number: payer_street = payer_number payer_number = ' ' else: payer_name = False payer_street = False payer_number = False payer_pincode = False payer_town = False payer_country = False qrr_dict = { 'top_position': '191mm', 'iban': bankkonto.iban or '', 'reference': sinv.esr_reference, 'reference_type': 'QRR', 'currency': sinv.currency, 'amount': "{:,.2f}".format(sinv.outstanding_amount).replace(",", "'"), 'message': sinv.name, 'additional_information': ' ', 'receiver_name': receiver_name, 'receiver_street': receiver_street, 'receiver_number': receiver_number, 'receiver_country': receiver_country, 'receiver_pincode': receiver_pincode, 'receiver_town': _(receiver_town, druckvorlage.language or 'de'), 'payer_name': payer_name, 'payer_street': payer_street, 'payer_number': payer_number, 'payer_country': payer_country, 'payer_pincode': payer_pincode, 'payer_town': payer_town, 'language': druckvorlage.language or 'de' } qrrs.append(qrr_dict) return qrrs @frappe.whitelist() def kulanz_ausgleich(mahnung, sinv, amount, outstanding_amount, due_date): mahnung = frappe.get_doc("Mahnung", mahnung) pe = frappe.get_doc({ "doctype": "Payment Entry", "payment_type": "Receive", "posting_date": today(), "company": mahnung.company, "sektion_id": mahnung.sektion_id, "party_type": "Customer", "party": mahnung.customer, "paid_to": frappe.get_value("Sektion", mahnung.sektion_id, "kulanz_konto"), "paid_amount": outstanding_amount, "received_amount": outstanding_amount, "references": [ { "reference_doctype": "Sales Invoice", "reference_name": sinv, "due_date": due_date, "total_amount": amount, "outstanding_amount": outstanding_amount, "allocated_amount": outstanding_amount } ], "reference_no": "Kulanzausgleich via Mahnlauf {0}".format(mahnung.name), "reference_date": today(), "remarks": "Kulanzausgleich via Mahnlauf {0}".format(mahnung.name) }) pe.insert() pe.submit() frappe.db.commit() return @frappe.whitelist() def bulk_submit(mahnungen, alle): mahnungen = json.loads(mahnungen) if len(mahnungen) < 1: if int(alle) == 1: mahnungen = frappe.get_list('Mahnung', filters={'docstatus': 0}, fields=['name']) if len(mahnungen) < 1: return 'keine' else: return 'keine' args = { 'mahnungen': mahnungen } 
enqueue("mvd.mvd.doctype.mahnung.mahnung.bulk_submit_bgj", queue='long', job_name='Buche Mahnungen {0}'.format(mahnungen[0]["name"]), timeout=5000, **args) return mahnungen[0]["name"] def bulk_submit_bgj(mahnungen): for mahnung in mahnungen: mahnung = frappe.get_doc("Mahnung", mahnung["name"]) mahnung.update_reminder_levels() mahnung.submit() return @frappe.whitelist() def bulk_delete(): mahnungen = frappe.get_list('Mahnung', filters={'docstatus': 0}, fields=['name']) if len(mahnungen) < 1: return 'keine' args = { 'mahnungen': mahnungen } enqueue("mvd.mvd.doctype.mahnung.mahnung.bulk_delete_bgj", queue='long', job_name='Lösche Entwurfs-Mahnungen {0}'.format(mahnungen[0]["name"]), timeout=5000, **args) return mahnungen[0]["name"] def bulk_delete_bgj(mahnungen): for mahnung in mahnungen: mahnung = frappe.get_doc("Mahnung", mahnung["name"]) mahnung.delete() return @frappe.whitelist() def is_mahnungs_job_running(jobname): from frappe.utils.background_jobs import get_jobs running = get_info(jobname) return running def get_info(jobname): from rq import Queue, Worker from frappe.utils.background_jobs import get_redis_conn from frappe.utils import format_datetime, cint, convert_utc_to_user_timezone colors = { 'queued': 'orange', 'failed': 'red', 'started': 'blue', 'finished': 'green' } conn = get_redis_conn() queues = Queue.all(conn) workers = Worker.all(conn) jobs = [] show_failed=False def add_job(j, name): if j.kwargs.get('site')==frappe.local.site: jobs.append({ 'job_name': j.kwargs.get('kwargs', {}).get('playbook_method') \ or str(j.kwargs.get('job_name')), 'status': j.status, 'queue': name, 'creation': format_datetime(convert_utc_to_user_timezone(j.created_at)), 'color': colors[j.status] }) if j.exc_info: jobs[-1]['exc_info'] = j.exc_info for w in workers: j = w.get_current_job() if j: add_job(j, w.name) for q in queues: if q.name != 'failed': for j in q.get_jobs(): add_job(j, q.name) if cint(show_failed): for q in queues: if q.name == 'failed': for j in q.get_jobs()[:10]: add_job(j, q.name) found_job = 'refresh' for job in jobs: if job['job_name'] == jobname: found_job = True return found_job
python
# for python 3.x use 'tkinter' rather than 'Tkinter' import Tkinter as tk import time import math from serial import * #Setting up Serial port #for raspberry pi use serialPort = "/dev/ttyACM0" #serialPort = "/dev/tty.usbmodem1411 #serialPort = "/dev/cu.usbmodemFA131" #baudRate = 115200 #ser = Serial(serialPort , baudRate, timeout=0, writeTimeout=0) #ensure non-blocking, code will not run if the port is not connected #assigned variables dataList = [] #empty dataList for receiving data serBuffer = "" tempBuffer= "" depthBuffer = "" tickerForDepth = 0 probeTempBuffer = "" #not in use yet joyStickOneBuffer = "" joyStickTwoBuffer = "" joyStickThreeBuffer = "" joyStickFourBuffer = "" joyStickFiveBuffer = "" joyStickSixBuffer = "" servoBuffer = "" lightBuffer = "" xAccelBuffer = "" yAccelBuffer = "" zAccelBuffer = "" angle = 0 # angleBuffer = 0 dataArray=[] previousAngle = "" motorColor = "white" timeInWater = "00:00" topDepthNumber = 0 #saved values for depthValues in dataTwo middleDepthNumber = 0 bottomDepthNumber = 0 l=""#char for top middle bottom usage in dataTwo z=0 #value for depth canvas movement horizontal in data Two zz=0 lineCoordsX=0 lineCoordsY=0 lightX1=0 lightX2=0 lightY1=0 lightY2=0 r="" e = "" g = "" b = "" coords=0 looops = 1 color = "white" w=0 altitudeBuffer = "" class App(): def __init__(self): self.root = tk.Tk() self.root.title("SeaSweepers BRUCE the RILF") self.root.option_add("*Font", "Rockwell 20") #Use with MACBOOK #self.root.option_add("*Font", "Rockwell 15") what we used in comp self.root.minsize(width=1440, height=880) self.root.maxsize(width=1440, height=880) self.root.configure(bg ="gray") dataLabel = ['Volt (V)','Amp (A)','Inside Temp (C)','Inside Temp (F)','Probe Temperature','Pressure', 'V1','V2','V3','V4','H5','H6','H7','H8'] #set some labels x=1 c=2 r=13 for l in dataLabel: if (x > 12): self.l = tk.Label(text=l, bg ="gray", width=5).grid(column=c,row=r) if c < 5: c+=1 else: c=2 r=15 x+=1 continue self.l = tk.Label(text=l, bg ="gray").grid(column=0,row=x,columnspan=2) x+=2 self.warningTitle = tk.Label(text="WARNING", bg="yellow", width=10,height=2) self.stopTitle = tk.Label(text="STOP", bg="red", width=10,height=2) #LabelsData self.voltData = tk.Label(text="TBD",relief=tk.SUNKEN,width=20,height=2) self.ampData = tk.Label(text="TBD",relief=tk.SUNKEN,width=20,height=2) self.temperatureData = tk.Label(text="TBD",relief=tk.SUNKEN,width=20,height=2) self.insideTempF = tk.Label(text="TBD",relief=tk.SUNKEN,width=20,height=2) self.probeTemperatureDataCelcius = tk.Label(text="TBD",relief=tk.SUNKEN,width=20,height=2) self.pressureData = tk.Label(text="TBD",relief=tk.SUNKEN,width=20,height=2) self.waterLeak = tk.Label(text="Water Leak", bg ="gray", width=10) self.waterSensorDataOne = tk.Label(text="TBD", relief=tk.SUNKEN, width=20,height=2) self.waterSensorDataTwo = tk.Label(text="TBD", relief=tk.SUNKEN, width=20,height=2) self.angle = tk.Label(text="TBD",relief=tk.SUNKEN,width=20,height=2) #motorData labels self.motorOneData = tk.Label(text="TBD", relief=tk.SUNKEN,width=6,height=2) self.motorTwoData = tk.Label(text="TBD", relief=tk.SUNKEN,width=6,height=2) self.motorThreeData = tk.Label(text="TBD", relief=tk.SUNKEN,width=6,height=2) self.motorFourData = tk.Label(text="TBD", relief=tk.SUNKEN,width=6,height=2) self.motorFiveData = tk.Label(text="TBD", relief=tk.SUNKEN,width=6,height=2) self.motorSixData = tk.Label(text="TBD", relief=tk.SUNKEN,width=6,height=2) self.motorSevenData = tk.Label(text="TBD", relief=tk.SUNKEN,width=6,height=2) self.motorEightData = 
tk.Label(text="TBD", relief=tk.SUNKEN,width=6,height=2) #extra data points self.aTitle = tk.Label(text="Servo Claw", bg ="gray") #used for servo self.aData = tk.Label(text="TBD",relief=tk.SUNKEN,width=20,height=2) self.bTitle = tk.Label(text="Y", bg ="gray") self.bData = tk.Label(text="TBD",relief=tk.SUNKEN,width=20,height=2) self.cTitle = tk.Label(text=" ", bg ="gray") self.cData = tk.Label(text="TBD",relief=tk.SUNKEN,width=5,height=2) #depth Datas and Labels mission 1 self.currentDepthTitle = tk.Label(text="Current Depth (m)", bg ="gray") self.currentDepthData = tk.Label(text="TBD",relief=tk.SUNKEN,width=20,height=2) self.topDepthTitle = tk.Label(text="Starting Depth", bg ="orange") self.topDepthData = tk.Label(text="TBD",relief=tk.SUNKEN,width=10,height=2) self.middleDepthTitle = tk.Label(text="Middle Depth", bg ="red") self.middleDepthData = tk.Label(text="TBD",relief=tk.SUNKEN,width=10,height=2) self.bottomDepthTitle = tk.Label(text="Bottom Depth", bg ="yellow") self.bottomDepthData = tk.Label(text="TBD",relief=tk.SUNKEN,width=10,height=2) #depth buttons self.topDepthButton = tk.Button(text="top",width=7,highlightbackground="gray",command= self.topDepthValue) self.middleDepthButton = tk.Button(text="middle",width=7,highlightbackground="gray", command=self.middleDepthValue) self.bottomDepthButton = tk.Button(text="bottom",width=7,highlightbackground="gray", command=self.bottomDepthValue) #difference in depths self.iceDepth = tk.Label(text="Ice Depth", bg ="gray") self.oceanDepth = tk.Label(text="Ocean Depth", bg ="gray") self.iceData = tk.Label(text="TBD", relief=tk.SUNKEN,width=5) self.oceanData = tk.Label(text="TBD", relief=tk.SUNKEN,width=5) #temp datas and labels mission 2 self.probeTempTitle = tk.Label(text="Probe Temp", bg ="gray") self.probeData = tk.Label(text="TBD",relief=tk.SUNKEN,width=10,height=2) self.probeDataF = tk.Label(text="TBD",relief=tk.SUNKEN,width=10,height=2) self.C = tk.Label(text="Celcius", bg ="gray",width=10,height=2) self.F = tk.Label(text="Fahrenheit", bg ="gray",width=10,height=2) self.probeButton = tk.Button(text="top",width=7,highlightbackground="gray",command=self.probeTempValue) #top right stuff self.timerTitle = tk.Label(text="Timer", bg="gray",width=15,height=2) self.timerButton = tk.Button(text= "Start", bg="gray", width=12,height=2,highlightbackground="gray", command=self.getTime) self.timerData = tk.Label(text="00:00", relief=tk.SUNKEN, width=7,height=1,font=("Rockwell", 100),bg="green") #self.timerData = tk.Label(text="00:00", relief=tk.SUNKEN, width=7,height=1,font=("Rockwell", 80),bg="green")#for raspberry pi self.dataButton = tk.Button(text="compile data", bg="gray", width=12,height=2,highlightbackground="gray", command=self.getData) #depthCanvas for depth self.depthCanvas = tk.Canvas(self.root, width=800, height = 500, background= "blue",bd=0,highlightthickness=1) self.rov2 = self.depthCanvas.create_polygon(0, 0, 40, 0, 40,5, 30,5, 30,15, 40,15, 40,20, 0,20, 0,15, 10,15, 10,5, 0,5, 0,0,outline='black', fill='black') self.light = self.depthCanvas.create_arc(0, -10, 90, 30,start=-30,outline='blue', fill='white',extent=60) self.topDepthLine = self.depthCanvas.create_line(0,0,800,0, fill = "orange",width=3, dash=(8, 8)) self.middleDepthLine = self.depthCanvas.create_line(0,0,800,0, fill = "red",width=3, dash=(8, 8)) self.bottomDepthLine = self.depthCanvas.create_line(0,0,800,0, fill = "yellow",width=3, dash=(8, 8)) self.finishLineWhite = self.depthCanvas.create_line(760, 0, 760, 500, fill = "white",width=8, dash=(20, 20)) self.finishLineBlack = 
self.depthCanvas.create_line(760, 20, 760, 500, fill = "black",width=8, dash=(20, 20)) bucket = 630 self.bucketWhite = self.depthCanvas.create_polygon(bucket,440, bucket+22,440, bucket+22,410, bucket+28,410, bucket+28,440, bucket+50,440, bucket+50,500, bucket,500,fill="white",outline="black") self.bucketLid = self.depthCanvas.create_rectangle(bucket-2,438, bucket+52,442,fill="orange",outline="black") cap = 100 self.capWhite = self.depthCanvas.create_polygon(cap,480, cap+15,480, cap+15,410, cap+35,410, cap+35,480, cap+50,480, cap+50,500, cap,500,fill="white",outline="black") self.capLid = self.depthCanvas.create_rectangle(cap-2,480, cap+52,480,fill="white",outline="black") self.flange = self.depthCanvas.create_rectangle(cap+10,410, cap+40,440,fill="black",outline="black") self.bolt = self.depthCanvas.create_polygon(cap+18,410, cap+12,410, cap+12,407, cap+25,407, cap+25,402, cap+5,402, cap+5,407, cap+18,407,fill="white", outline="black") self.bolt = self.depthCanvas.create_polygon(cap+38,410, cap+32,410, cap+32,407, cap+45,407, cap+45,402, cap+25,402, cap+25,407, cap+38,407,fill="white", outline="black") cube = 500 self.cubeSateOutside = self.depthCanvas.create_rectangle(cube,460, cube+70,500,width=1,fill="white",outline="black") self.cubeSateMiddle = self.depthCanvas.create_rectangle(cube+5,465, cube+65,495,width=1,fill="blue",outline="white") self.cubeSateInside = self.depthCanvas.create_rectangle(cube+10,470, cube+60,490,fill="white",outline="white") self.cubeSateLine = self.depthCanvas.create_line(cube+5, 480, cube+65,480, fill="white",width=5) self.cubeSatNumber = self.depthCanvas.create_text(cube+35, 480, text="H139D") coral =550 self.coralBranchOne = self.depthCanvas.create_line(coral+3, 475, coral+15, 490, fill = "red",width=2) self.coralBranchTwo = self.depthCanvas.create_line(coral+10, 470, coral+15, 490, fill = "yellow",width=2) self.coralBranchThree = self.depthCanvas.create_line(coral+30, 465, coral+15, 490, fill = "orange",width=2) self.coralBranchFour = self.depthCanvas.create_line(coral, 460, coral+15, 490, fill = "purple",width=2) self.coralBranchFive = self.depthCanvas.create_line(coral+35, 470, coral+15, 490, fill = "white") self.coralBase = self.depthCanvas.create_polygon(coral+18,490, coral+12,490, coral+12,492, coral+25,492, coral+25,497, coral+5,497, coral+5,492, coral+18,492,fill="white") oilT=0 self.oilTOne = self.depthCanvas.create_polygon(oilT,475, oilT+35,475, oilT+35,485, oilT+23,485, oilT+23,496, oilT+12,496, oilT+12,485, oilT,485,fill="brown",outline="black") self.oilTTwo = self.depthCanvas.create_polygon(oilT+40,475, oilT+75,475, oilT+75,485, oilT+63,485, oilT+63,496, oilT+52,496, oilT+52,485, oilT+40,485,fill="brown",outline="black") self.oilTBase = self.depthCanvas.create_rectangle(oilT+10,496, oilT+64,500, fill="brown",outline="black") cCoral=600 self.aBase = self.depthCanvas.create_line(cCoral,500, cCoral+50,500,fill="#daa520",width=5) self.aBranch = self.depthCanvas.create_line(cCoral+25,498, cCoral+25,430, fill="#daa520",width=5) self.aBranchTwo = self.depthCanvas.create_line(cCoral,480, cCoral+50,480,fill="#daa520",width=5) self.aBranchThree = self.depthCanvas.create_line(cCoral,483, cCoral,460,fill="#daa520",width=5) self.aBranchFour = self.depthCanvas.create_line(cCoral+50,483, cCoral+50,450,fill="#daa520",width=5) self.aBranchDead = self.depthCanvas.create_line(cCoral+15,445, cCoral+23,445, fill="black",width=5) self.aBranchDeadTwo = self.depthCanvas.create_line(cCoral+15,445, cCoral+15,430, fill="black",width=5) self.aSign = 
self.depthCanvas.create_rectangle(cCoral+15,480, cCoral+35,498, fill="white") self.aLetter = self.depthCanvas.create_text(cCoral+25,490, text="A") ESP = 200 self.box = self.depthCanvas.create_polygon(ESP+50,450, ESP+100,450, ESP+100,500, ESP+50,500,fill="red",outline="black") self.box = self.depthCanvas.create_polygon(ESP+50,450, ESP+100,450, ESP+100,500, ESP+50,500,fill="red",outline="black") self.connector = self.depthCanvas.create_polygon(ESP,480, ESP+20,480, ESP+20,470, ESP+30,470, ESP+30,480, ESP+60,480, ESP+60,490, ESP+30,490, ESP+30,500, ESP+20,500, ESP+20,490, ESP,490, fill="white",outline="black") self.connectorLine = self.depthCanvas.create_line(ESP,485, ESP-10,480, fill="orange") self.connectorLineT = self.depthCanvas.create_line(ESP-10,480, ESP-40,490, fill="orange") #servoCanvas self.servoCanvas = tk.Canvas(self.root, width=200, height = 150, background= "blue") self.servoClawRight = self.servoCanvas.create_polygon(0,0, 20,0, 20,10, 30,10, 30,30, 20,30, 20,100, 0,100, outline='black', fill='black') self.servoClawLeft = self.servoCanvas.create_polygon(200,0, 180,0, 180,10, 170,10, 170,30, 180,30, 180,100, 200,100, outline='black', fill='black') self.rovBase = self.servoCanvas.create_polygon(0,100,200,100,200,150,0,150,outline='black', fill='black') #compassCanvas self.compassCanvas = tk.Canvas(self.root, width=200, height = 200, background= "gray") self.compass = self.compassCanvas.create_oval(10, 10, 190, 190, outline='black', fill='white') self.compassArcNegativeOne = self.compassCanvas.create_arc(10, 10, 190, 190,start=90, fill='green',extent=0) self.compassArcNegativeTwo = self.compassCanvas.create_arc(10, 10, 190, 190,start=90, fill='yellow',extent=0) self.compassArcNegativeThree = self.compassCanvas.create_arc(10, 10, 190, 190,start=90, fill='orange',extent=0) self.compassArcNegativeFour = self.compassCanvas.create_arc(10, 10, 190, 190,start=90, fill='red',extent=0) self.compassArc = self.compassCanvas.create_arc(10, 10, 190, 190,start=90, fill='green',extent=0) self.compassArcTwo = self.compassCanvas.create_arc(10, 10, 190, 190,start=90, fill='yellow',extent=0) self.compassArcThree = self.compassCanvas.create_arc(10, 10, 190, 190,start=90, fill='orange',extent=0) self.compassArcFour = self.compassCanvas.create_arc(10, 10, 190, 190,start=90, fill='red',extent=0) self.compassLineOne = self.compassCanvas.create_line(100,100,10,60, fill="red",arrow=tk.LAST, arrowshape=(70,75,3)) self.compassLineTwo = self.compassCanvas.create_line(100,100,10,60,arrow=tk.LAST, arrowshape=(50,55,3)) self.middle = self.compassCanvas.create_oval(95,95,105,105, outline='black', fill='white') #motorControl canvas self.motorControl = tk.Canvas(self.root, width=200, height = 200, background= "blue") self.hexagon = self.motorControl.create_polygon(25,75,75,25,125,25,175,75,175,135,125,185,75,185,25,135, outline='black', fill='black') self.V1 = self.motorControl.create_oval(40,40,60,60, outline='black', fill='white') self.V1R = self.motorControl.create_arc(40,40,60,60, start=90, fill='green',extent=0)#tk.CHORDS? 
self.V2 = self.motorControl.create_oval(140,40,160,60, outline='black', fill='white') self.V2R = self.motorControl.create_arc(140,40,160,60, start=90, fill='green',extent=0) self.V3 = self.motorControl.create_oval(40,150,60,170, outline='black', fill='white') self.V3R = self.motorControl.create_arc(40,150,60,170, start=90, fill='green',extent=0) self.V4 = self.motorControl.create_oval(140,150,160,170, outline='black', fill='white') self.V4R = self.motorControl.create_arc(140,150,160,170, start=90, fill='green',extent=0) self.H1 = self.motorControl.create_polygon(50,80,80,50,90,60,60,90,50,80, outline='black', fill='white') self.H1R = self.motorControl.create_polygon(65,65,80,50,90,60,75,75,65,65,outline='black',fill='green') self.H2 = self.motorControl.create_polygon(150,80,120,50,110,60,140,90,150,80, outline='black', fill='white') self.H2R = self.motorControl.create_polygon(135,65,120,50,110,60,125,75,135,65,outline='black',fill='green') self.H3 = self.motorControl.create_polygon(50,120,80,150,90,140,60,110,50,120, outline='black', fill='white') self.H3R = self.motorControl.create_polygon(65,135,80,150,90,140,75,125,65,135,outline='black',fill='green') self.H4 = self.motorControl.create_polygon(150,120,120,150,110,140,140,110,150,120, outline='black', fill='white') self.H4R = self.motorControl.create_polygon(135,135,120,150,110,140,125,125,135,135,outline='black',fill='green') #error display self.errorLog = tk.Text(self.root, width=45, height=4) self.messageLog = tk.Text(self.root, width=45, height=4) #grid layout #left column self.warningTitle.grid( column=0, row=0) self.stopTitle.grid( column=1, row=0) self.voltData.grid( column=0, row=2, columnspan=2) self.ampData.grid( column=0, row=4, columnspan=2) self.temperatureData.grid( column=0, row=6, columnspan=2) self.angle.grid( column=2, row=6, columnspan=4) self.insideTempF.grid( column=0, row=8, columnspan=2) self.probeTemperatureDataCelcius.grid(column=0,row=10,columnspan=2) self.pressureData.grid( column=0, row=12, columnspan=2) self.waterLeak.grid( column=6, row=0) self.waterSensorDataOne.grid( column=2, row=0, columnspan=4) self.waterSensorDataTwo.grid( column=7, row=0, columnspan=2) #motor grid self.motorOneData.grid( column=2, row=14) self.motorTwoData.grid( column=3, row=14) self.motorThreeData.grid( column=4, row=14) self.motorFourData.grid( column=5, row=14) self.motorFiveData.grid( column=2, row=16) self.motorSixData.grid( column=3, row=16) self.motorSevenData.grid( column=4, row=16) self.motorEightData.grid( column=5, row=16) #extras self.aTitle.grid( column=6, row=13) #self.aData.grid( column=6, row=14) #self.bTitle.grid( column=6, row=15) #self.bData.grid( column=6, row=16) #self.cTitle.grid( column=9, row=15) #self.cData.grid( column=9, row=16) #right side self.timerTitle.grid( column=10, row=2, columnspan= 2) self.timerButton.grid( column=12, row=2, columnspan= 3) self.dataButton.grid( column=12, row=3, columnspan= 3) self.timerData.grid( column=10, row=0, columnspan= 5, rowspan=2) self.currentDepthTitle.grid( column=10, row=3, columnspan= 2) self.currentDepthData.grid( column=10, row=4, columnspan= 2) self.topDepthTitle.grid( column=10, row=5) self.topDepthButton.grid( column=11, row=5) self.topDepthData.grid( column=10, row=6) self.middleDepthTitle.grid( column=10, row=7) self.middleDepthButton.grid( column=11, row=7) self.middleDepthData.grid( column=10, row=8) self.bottomDepthTitle.grid( column=10, row=9) self.bottomDepthButton.grid( column=11, row=9) self.bottomDepthData.grid( column=10, row=10) self.iceDepth.grid( 
column=12, row=6) self.iceData.grid( column=12, row=7) self.oceanDepth.grid( column=12, row=8) self.oceanData.grid( column=12, row=9) #probe right side self.probeTempTitle.grid( column=10, row=11) self.probeButton.grid( column=11, row=11) self.probeData.grid( column=10, row=12) self.probeDataF.grid( column=11, row=12) self.C.grid( column=10, row=13) self.F.grid( column=11, row=13) #canvases self.depthCanvas.grid( column=2, row=2, columnspan=8, rowspan=11) self.compassCanvas.grid( column=7, row=13, columnspan=1, rowspan=4) self.motorControl.grid( column=0, row=13, columnspan=2, rowspan=4) self.servoCanvas.grid( column=6, row=14, rowspan=3) self.errorLog.grid( column=9, row=13, columnspan=4, rowspan=2) self.messageLog.grid( column=9, row=15, columnspan=4, rowspan=2) self.update_data() self.root.mainloop() #functions def getData(self): global dataList newList = dataList[:] file = open('dataSheet.txt','a') file.truncate() for i in newList: file.write(i) print "done" #file.close() def topDepthValue(self): global depthBuffer global topDepthNumber tb = depthBuffer length = len(tb) length = length - 2 labelDepth = tb[:length] + "." + tb[length:] self.topDepthData.configure(text=labelDepth) topDepthNumber = float(tb) self.depthCanvas.update() def middleDepthValue(self): global depthBuffer global middleDepthNumber length = len(depthBuffer) length = length - 2 labelDepth = depthBuffer[:length] + "." + depthBuffer[length:] self.middleDepthData.configure(text=labelDepth) middleDepthNumber = float(depthBuffer) self.depthCanvas.update() def bottomDepthValue(self): global depthBuffer global bottomDepthNumber length = len(depthBuffer) length = length - 2 labelDepth = depthBuffer[:length] + "." + depthBuffer[length:] self.bottomDepthData.configure(text=labelDepth) bottomDepthNumber = float(depthBuffer) self.depthCanvas.update() def probeTempValue(self): global probeTempBuffer try: convertedTemp = self.tempConversion("p") self.probeDataF.configure(text=convertedTemp) self.probeData.configure(text=probeTempBuffer) except: self.probeDataF.configure(text="ERR") self.probeData.configure(text="ERR") def updateClock(self): now = time.time() global startTime global timeInWater timeElapsed = int(now) - int(startTime) minutes= int(timeElapsed / 60) if minutes >13: self.timerData.configure(bg = "red") elif minutes >12: self.timerData.configure(bg = "yellow") if minutes < 10: minutes = "0" + str(minutes) seconds= timeElapsed % 60 if seconds < 10: seconds = "0" +str(seconds) timeElapsed = str(minutes)+":"+str(seconds) timeInWater = timeElapsed self.timerData.configure(text=timeElapsed) self.root.after(1000, self.updateClock) #timer function def getTime(self): global z z=0 self.depthCanvas.delete() start = time.time() global startTime startTime = int(start) self.updateClock() def update_data(self): global dataList global timeInWater global w ser.open serr="" c= "" first = 0 global dataArray dataArray = [] data = ser.readline() #dataList.append(timeInWater) #dataList.append(data) #print dataList for i in data: dataArray.append(i) #print dataArray w = 0 ser.close for i in range(19): self.dataOne(i) self.dataTwo() self.root.after(20, self.update_data) def dataOne(self,c): head = ['A','B','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z', 'a','b','c','d','e','f','g','h','i','j','k','l','m','n'] limits = ['30','35','10000','200000','3000', '3500','10000','10000','10000','10000','10000','10000','10000', 
'10000','10000','10000','10000','10000','10000','10000','10000','1000','10','13','1000','1000','100','100', '100','100','100','100','1000','1000','50','100','50','100'] global color global motorColor global w global coords # buffers = ['tempBuffer','pressureBuffer', 'probeTempBuffer','joyStickOneBuffer', #'joyStickTwoBuffer','joyStickThreeBuffer','joyStickFourBuffer','joyStickFiveBuffer','joyStickSixBuffer','hThreeBuffer', #'hFourBuffer','totalVoltBuffer','totalAmpBuffer','xAccelBuffer','yAccelBuffer', #'zAccelBuffer','angleBuffer','waterOne','waterTwo'] global buf #buf = buffers[c] first = 0 buf = "" for item in range(len(dataArray)): if first == 0: if dataArray[item] == head[2*c]: first +=1 #print item a = 1 try: while dataArray[int(item)+a] != head[(2*c)+1]: buf += dataArray[int(item)+a] a +=1 #print int(buf) data points if (c==17)or(c==18): if int(buf) >= int(limits[(2*c)+1]): color = "red" self.stopTitle.configure(bg = color) w+=1 #ser.open #ser.write(b'2') #ser.close elif int(buf)>= int(limits[(2*c)]): color = "yellow" self.warningTitle.configure(bg = color) w+=1 #ser.open #ser.write(b'1') #ser.close else: color = "white" if w == 0: self.warningTitle.configure(bg = "gray") self.stopTitle.configure(bg = "gray") #ser.open #ser.write(b'0') #ser.close except: print "bad data" + str(c) if c == 0: try: self.temperatureData.configure(text=buf,bg = color) global tempBuffer tempBuffer = buf f = self.tempConversion("i") self.insideTempF.configure(text=f, bg = color) except: print "bad temp" elif c == 1: try: self.pressureData.configure(text=buf, bg = color) except: print "bad pressure" elif c == 2: try: length = len(buf) length = length - 2 buf = buf[:length] + "." + buf[length:] self.probeTemperatureDataCelcius.configure(text=buf, bg = color) global probeTempBuffer probeTempBuffer = buf except: print"bad probe temp" elif c == 3: global joyStickOneBuffer joyStickOneBuffer = buf elif c == 4: global joyStickTwoBuffer joyStickTwoBuffer = buf #self.joyStickConversion() elif c == 5: global joyStickThreeBuffer joyStickThreeBuffer = buf elif c == 6: global joyStickFourBuffer joyStickFourBuffer = buf elif c == 7: global joyStickFiveBuffer joyStickFiveBuffer = buf elif c == 8: #global lightBuffer global joyStickSixBuffer joyStickSixBuffer = buf #lightBuffer = buf self.joyStickConversion() elif c == 9: global servoBuffer servoBuffer = buf self.servoMove() elif c == 10: global lightBuffer lightBuffer = buf self.lightOn() elif c == 11: try: self.voltData.configure(text="12", bg = color) except: print "bad volt data" elif c == 12: try: self.ampData.configure(text="1.0", bg = color) except: print "bad amp data" elif c == 13: try: adf=1 #self.aData.configure(text=buf, bg = color) except: print "bad a data" elif c == 14: try: self.bData.configure(text=buf, bg = color) except: print "bad b data" elif c == 15: try: self.cData.configure(text=buf, bg = color) except: print "bad c data" elif c == 16: try: self.angle.configure(text=buf, bg = color) #self.compassData(buf) except: print "bad compass data" elif c == 17: try: self.waterSensorDataOne.configure(text=buf, bg = color) except: print "bad water sensor data" elif c == 18: try: self.waterSensorDataTwo.configure(text=buf, bg = color) except: print "bad water sensor data" def dataTwo(self): global topDepthNumber global middleDepthNumber global bottomDepthNumber global z #value for depth canvas movement horizontal global zz global tickerForDepth #will replace z global coords global color global depthBuffer global timeInWater global lightBuffer global lightX1 
global lightX2 global lightY1 global lightY2 first = 0 for item in range(len(dataArray)): if first == 0: if dataArray[item] == 'C': first +=1 depthBuffer = '' #print item a = 1 try: while dataArray[int(item)+a] != 'D': depthBuffer += dataArray[int(item)+a] a +=1 except: print "bad depthData" try: coords = int(depthBuffer) if timeInWater != ("00:00"): second = timeInWater[4:] if (second != tickerForDepth): self.depthCanvas.coords(self.rov2, 0+z, 0+(coords), 40+z, 0+(coords), 40+z,5+(coords), 30+z,5+(coords), 30+z,15+(coords), 40+z,15+(coords), 40+z,20+(coords), 0+z,20+(coords), 0+z,15+(coords), 10+z,15+(coords), 10+z,5+(coords), 0+z,5+(coords), 0+z,0+(coords)) self.depthCanvas.coords(self.light, lightX1+z, lightY1+(coords), lightX2+z, lightY2+(coords)) global lineCoordsX global lineCoordsY #coords for line item = self.depthCanvas.create_line(lineCoordsX, lineCoordsY, z, (coords), fill = "white",width=1) lineCoordsX=z lineCoordsY=(coords) tickerForDepth = second if ((zz%5)!=0): z+=1 zz+=1 self.depthCanvas.coords(self.topDepthLine,0,topDepthNumber,800,topDepthNumber) self.depthCanvas.coords(self.middleDepthLine,0,middleDepthNumber,800,middleDepthNumber) self.depthCanvas.coords(self.bottomDepthLine,0,bottomDepthNumber,800,bottomDepthNumber) minute = timeInWater[:2]+timeInWater[3:] if (int(minute) % 100) == 0: item = self.depthCanvas.create_line(z, 450, z, 500, fill = "white",width=1) ice = (topDepthNumber - middleDepthNumber)/100 ocean = (middleDepthNumber - bottomDepthNumber)/100 self.iceData.configure(text=ice) self.oceanData.configure(text=ocean) except: print"bad depth" length = len(depthBuffer) length = length - 2 labelDepth = depthBuffer[:length] + "." + depthBuffer[length:] self.currentDepthData.configure(text=labelDepth,bg = color) def compassData(self,angle): global previousAngle global looops try: pi = int(angle) previousAngle = "" previousAngle += angle looops = (int(angle)/360)+4 except: angle = previousAngle angleBuffer = "" # empty the buffer r = int(angle) num = math.radians(pi) num2 = math.radians(pi+180) x =(100-math.sin(num)*70) y= (100-math.cos(num)*70) x2=(100-math.sin(num2)*50) y2= (100-math.cos(num2)*50) self.compassCanvas.coords(self.compassLineOne, 100,100,x,y) self.compassCanvas.coords(self.compassLineTwo, 100,100,x2,y2) if looops == 4: self.compassCanvas.itemconfigure(self.compassArc, extent=r) self.compassCanvas.itemconfigure(self.compassArcTwo, extent=0) self.compassCanvas.itemconfigure(self.compassArcThree, extent=0) self.compassCanvas.itemconfigure(self.compassArcFour, extent=0) self.compassCanvas.itemconfigure(self.compassArcNegativeOne, extent=0) self.compassCanvas.itemconfigure(self.compassArcNegativeTwo, extent=0) self.compassCanvas.itemconfigure(self.compassArcNegativeThree, extent=0) self.compassCanvas.itemconfigure(self.compassArcNegativeFour, extent=0) if looops == 5: self.compassCanvas.itemconfigure(self.compassArc, extent=359) self.compassCanvas.itemconfigure(self.compassArcTwo, extent=r) self.compassCanvas.itemconfigure(self.compassArcThree, extent=0) self.compassCanvas.itemconfigure(self.compassArcFour, extent=0) if looops == 6: self.compassCanvas.itemconfigure(self.compassArcTwo, extent=359) self.compassCanvas.itemconfigure(self.compassArcThree, extent=r) self.compassCanvas.itemconfigure(self.compassArcFour, extent=0) if looops == 7: self.compassCanvas.itemconfigure(self.compassArcTwo, extent=359) self.compassCanvas.itemconfigure(self.compassArcFour, extent=r) if looops == 3: self.compassCanvas.itemconfigure(self.compassArc, extent=0) 
self.compassCanvas.itemconfigure(self.compassArcNegativeFour, extent=0) self.compassCanvas.itemconfigure(self.compassArcNegativeThree, extent=0) self.compassCanvas.itemconfigure(self.compassArcNegativeTwo, extent=0) self.compassCanvas.itemconfigure(self.compassArcNegativeOne, extent=r) if looops == 2: self.compassCanvas.itemconfigure(self.compassArcNegativeFour, extent=0) self.compassCanvas.itemconfigure(self.compassArcNegativeThree, extent=0) self.compassCanvas.itemconfigure(self.compassArcNegativeOne, extent=359) self.compassCanvas.itemconfigure(self.compassArcNegativeTwo, extent=r) if looops == 1: self.compassCanvas.itemconfigure(self.compassArcNegativeFour, extent=0) self.compassCanvas.itemconfigure(self.compassArcNegativeThree, extent=r) self.compassCanvas.itemconfigure(self.compassArcNegativeTwo, extent=359) self.compassCanvas.itemconfigure(self.compassArcNegativeOne, extent=359) if looops == 0: self.compassCanvas.itemconfigure(self.compassArcNegativeFour, extent=r) self.compassCanvas.itemconfigure(self.compassArcNegativeThree, extent=359) self.compassCanvas.itemconfigure(self.compassArcNegativeTwo, extent=359) self.compassCanvas.itemconfigure(self.compassArcNegativeOne, extent=359) self.compassCanvas.update() def motorCanvasColor(self, buf): try: if int(buf)>500: return "orange" elif int(buf)>300: return "yellow" elif int(buf)>100: return "green" else: return "white" except: print"bad motorCanvasColor" def tempConversion(self,t): if t == "p": global probeTempBuffer try: fahreinheit = ((float(probeTempBuffer)*1.8000)+32.00) except: print"bad probe temp data" if t == "i": global tempBuffer try: fahreinheit = ((float(tempBuffer)*1.8000)+32.00) except: print"bad inside temp" try: fahreinheit = float(fahreinheit * 100) fahreinheit = float(int(fahreinheit) / 100) return fahreinheit except: return 0000 def servoMove(self): global servoBuffer try: move = int(servoBuffer)/14 self.servoCanvas.coords(self.servoClawRight, 0+move,0, 20+move,0, 20+move,10, 30+move,10, 30+move,30, 20+move,30, 20+move,100, 0+move,100) self.servoCanvas.coords(self.servoClawLeft, 200-move,0, 180-move,0, 180-move,10, 170-move,10, 170-move,30, 180-move,30, 180-move,100, 200-move,100) except: print"bad servo conversion" def lightOn(self): global lightBuffer global lightX1 global lightX2 global lightY1 global lightY2 try: beam = int(lightBuffer) if (beam <= 445): self.depthCanvas.itemconfigure(self.light, extent=0) elif (beam > 445): lightBeam = beam/10 st = -(lightBeam/2) yVal = self.map(beam,400,1023,0,20) xVal = self.map(beam,400,1023,0,45) lightX1=45-(xVal) lightX2=45+(xVal) lightY1=10-(yVal) lightY2=10+(yVal) self.depthCanvas.itemconfigure(self.light,start= st,extent =lightBeam) except: print "bad beam" def joyStickConversion(self): global joyStickOneBuffer global joyStickTwoBuffer global joyStickThreeBuffer global joyStickFourBuffer global joyStickFiveBuffer global joyStickSixBuffer global angle vOne = 0 vTwo = 0 vThree = 0 vFour = 0 hOne = 0 hTwo = 0 hThree = 0 hFour = 0 try: joyStickOne = self.joyStickMap(int(joyStickOneBuffer)) joyStickTwo = self.joyStickMap(int(joyStickTwoBuffer)) joyStickThree = self.joyStickMap(int(joyStickThreeBuffer)) joyStickFour = self.joyStickMap(int(joyStickFourBuffer)) joyStickFive = self.joyStickMap(int(joyStickFiveBuffer)) joyStickSix = self.joyStickMap(int(joyStickSixBuffer)) except: print"bad joystick conversion" try: if (joyStickOne > 50) or (joyStickOne < -50): vOne = joyStickOne vTwo = joyStickOne vThree = joyStickOne vFour = joyStickOne elif (joyStickThree > 50) or 
(joyStickThree < -50): vOne = joyStickThree vTwo = joyStickThree vThree = joyStickThree vFour = joyStickThree elif (joyStickFour > 15) or (joyStickFour < -15): vOne = joyStickFour vTwo = joyStickFour vThree = -int(joyStickFour) vFour = -int(joyStickFour) if (joyStickTwo > 50) or (joyStickTwo < -50): hOne = joyStickTwo hTwo = joyStickTwo hThree = joyStickTwo hFour = joyStickTwo elif (joyStickFive > 50) or (joyStickFive < -50): hOne = int(joyStickFive) hTwo = -joyStickFive hThree = -int(joyStickFive) hFour = joyStickFive elif (joyStickSix > 15) or (joyStickSix < -15): hOne = joyStickSix hTwo = joyStickSix hThree = -int(joyStickSix) hFour = -int(joyStickSix) if (joyStickSix > 0): angle += 10 elif (joyStickSix < 0): angle -= 10 angle = str(angle) self.compassData(angle) angle = int(angle) except: print"bad joystick read" try: self.motorOneData.configure(text=vOne) self.motorTwoData.configure(text=vTwo) self.motorThreeData.configure(text=vThree) self.motorFourData.configure(text=vFour) self.motorFiveData.configure(text=hOne) self.motorSixData.configure(text=hTwo) self.motorSevenData.configure(text=hThree) self.motorEightData.configure(text=hFour) except: print "bad labeling motors" try: hOne = int(hOne)/17 hTwo = int(hTwo)/17 hThree = int(hThree)/17 hFour = int(hFour)/17 self.motorControl.coords(self.H1R, 65,65,65+hOne,65-hOne,75+hOne,75-hOne,75,75,65,65) self.motorControl.coords(self.H2R, 135,65,135+hTwo,65+hTwo,125+hTwo,75+hTwo,125,75,135,65) self.motorControl.coords(self.H3R, 65,135,65+hThree,135+hThree,75+hThree,125+hThree,75,125,65,135) self.motorControl.coords(self.H4R, 135,135,135+hFour,135-hFour,125+hFour,125-hFour,125,125,135,135) self.motorControl.itemconfigure(self.V1R, extent=vOne) self.motorControl.itemconfigure(self.V2R, extent=vTwo) self.motorControl.itemconfigure(self.V3R, extent=vThree) self.motorControl.itemconfigure(self.V4R, extent=vFour) except: print "bad joystick canvas" #motorColor = self.motorCanvasColor(vOne) #self.motorControl.itemconfigure(self.V1, fill=motorColor) #self.motorControl.itemconfigure(self.V2, fill=motorColor) #self.motorControl.itemconfigure(self.V3, fill=motorColor) #self.motorControl.itemconfigure(self.V4, fill=motorColor) self.motorControl.update() def map(self, value, leftMin, leftMax, rightMin, rightMax): leftSpan = leftMax - leftMin # Figure out how 'wide' each range is rightSpan = rightMax - rightMin valueScaled = float(value - leftMin) / float(leftSpan) # Convert the left range into a 0-1 range (float) return int(rightMin + (valueScaled * rightSpan)) # Convert the 0-1 range into a value in the right range. def joyStickMap(self, joyStick): if joyStick >= 512: #512-1023 down joyStick = self.map(joyStick, 512, 1023, 0, 255) elif joyStick < 512: #0-512 up joyStick = self.map(joyStick, 512, 0, 0, 255) joyStick = (joyStick * -1) return joyStick app=App()
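# Worked example (illustrative, not part of the original GUI code): the map() helper
# above rescales a value linearly from one range to another. For a raw joystick
# reading of 768 on the 512..1023 half of the stick:
#   int(0 + ((768 - 512) / float(1023 - 512)) * 255) == 127
# so joyStickMap(768) returns 127 (and readings below 512 come back negated).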
python
#-------------------------------------------------------------------------------
# Name:        opan_const
# Purpose:     Test objects for opan.const
#
# Author:      Brian Skinn
#              bskinn@alum.mit.edu
#
# Created:     10 Mar 2016
# Copyright:   (c) Brian Skinn 2016
# License:     The MIT License; see "license.txt" for full license terms
#              and contributor agreement.
#
#    This file is part of opan (Open Anharmonic), a system for automated
#    computation of anharmonic properties of molecular systems via wrapper
#    calls to computational/quantum chemical software packages.
#
#    http://www.github.com/bskinn/opan
#
#-------------------------------------------------------------------------------

import unittest


class TestOpanEnumValueCheck(unittest.TestCase):
    def test_OpanEnum_ValueCheck(self):
        from opan.const import EnumDispDirection as EDD

        # Representative value in a representative Enum
        self.assertTrue(EDD.NEGATIVE in EDD)

    def test_OpanEnum_IterCheck(self):
        from opan.const import EnumDispDirection as EDD

        self.assertSetEqual({'NEGATIVE', 'NO_DISP', 'POSITIVE'},
                            set(k for k in EDD))


def suite():
    s = unittest.TestSuite()
    tl = unittest.TestLoader()
    s.addTests([tl.loadTestsFromTestCase(TestOpanEnumValueCheck)])
    return s


if __name__ == '__main__':  # pragma: no cover
    print("Module not executable.")
python
import pytest

# test_specials.py
# Tests the special abilities of each character

import helpers as H


def test_bob_kill_hunter():
    # Get a game containing Bob
    gc, ef, p = H.get_game_with_character("Bob")

    # Check that Bob hasn't won initially, or with 4 equips
    assert not p.character.win_cond(gc, p)

    # Check that Bob wins if he kills a neutral
    hunter = H.get_a_hunter(gc)
    hunter.setDamage(20, p)
    assert not p.character.win_cond(gc, p)
    assert p in gc.getDeadPlayers()


test_bob_kill_hunter()
python
''' Created on 12. 10. 2016 @author: neneko ''' from lxml import etree try: from StringIO import StringIO except ImportError: from io import BytesIO as StringIO import hashlib import uuid from eet_ns import * from string import Template import base64 from utils import find_node envelope_template = Template('''<${soap}:Envelope xmlns:${soap}="${soap_url}"> <${soap_env}:Header xmlns:${soap_env}="${soap_env_url}"> <${wsse}:Security xmlns:${wsse}="${wsse_url}" xmlns:${wsu}="${wsu_url}" ${soap}:mustUnderstand="1"> <${wsse}:BinarySecurityToken ${wsu}:Id="${cert_id}" EncodingType="${encoding_base64_url}" ValueType="${value_x509_url}">${sec_token}</${wsse}:BinarySecurityToken> <${ds}:Signature xmlns:${ds}="${ds_url}" Id="${sig_id}"> <${ds}:SignedInfo xmlns:${ds}="${ds_url}"> <${ds}:CanonicalizationMethod Algorithm="${ec_url}"> <${ec}:InclusiveNamespaces xmlns:${ec}="${ec_url}" PrefixList="${soap}"/> </${ds}:CanonicalizationMethod> <${ds}:SignatureMethod Algorithm="${algo_sha256}"/> <${ds}:Reference URI="#${body_id}"> <${ds}:Transforms> <${ds}:Transform Algorithm="${ec_url}"> <${ec}:InclusiveNamespaces xmlns:${ec}="${ec_url}" PrefixList=""/> </${ds}:Transform> </${ds}:Transforms> <${ds}:DigestMethod Algorithm="${algo_digest_sha256}"/> <${ds}:DigestValue></${ds}:DigestValue> </${ds}:Reference> </${ds}:SignedInfo> <${ds}:SignatureValue></${ds}:SignatureValue> <${ds}:KeyInfo Id="${key_id}"> <${wsse}:SecurityTokenReference ${wsu}:Id="${sec_token_id}"> <${wsse}:Reference URI="#${cert_id}" ValueType="${value_x509_url}"/> </${wsse}:SecurityTokenReference> </${ds}:KeyInfo> </${ds}:Signature> </${wsse}:Security> </${soap_env}:Header> <${soap}:Body wsu:Id="${body_id}" xmlns:${wsu}="${wsu_url}" xmlns:${soap}="${soap_url}"></${soap}:Body> </${soap}:Envelope>''') namespaces_dict = { 'soap': NS_SOAP, 'soap_url': NS_SOAP_URL, 'soap_env': NS_SOAP_ENV, 'soap_env_url': NS_SOAP_ENV_URL, 'wsse': NS_WSSE, 'wsse_url': NS_WSSE_URL, 'wsu': NS_WSU, 'wsu_url': NS_WSU_URL, 'ds': NS_DS, 'ds_url': NS_DS_URL, 'ec': NS_EC, 'ec_url': NS_EC_URL, 'eet_url': NS_EET_URL, 'algo_sha256': ALGORITHM_SHA256, 'algo_digest_sha256': ALGORITHM_DIGEST_SHA256, 'value_x509_url': VALUE_X509_URL, 'encoding_base64_url': ENCODING_BASE64_URL } def get_normalized_subtree(node, includive_prefixes=[]): tree = etree.ElementTree(node) ss = StringIO() tree.write_c14n( ss, exclusive=True, inclusive_ns_prefixes=includive_prefixes) return ss.getvalue() def calculate_node_digest(node): data = get_normalized_subtree(node, ['soap']) return hashlib.sha256(data).digest() def soap_wsse(payload_node, signing): '''Stores payload_node into a SOAP envelope and calculates the wsse signature Keyword arguments: payload_node - top node for the payload (lxml.Element) signing - signing object (eet.Signing) ''' # Prepare parser parser = etree.XMLParser(remove_blank_text=True, ns_clean=False) # Prepare IDs for header body_id = 'id-'+uuid.uuid4().hex cert_id = 'X509-'+uuid.uuid4().hex sig_id = 'SIG-' + uuid.uuid4().hex key_id = 'KI-'+ uuid.uuid4().hex sec_token_id='STR-'+ uuid.uuid4().hex values = dict(namespaces_dict) values.update({'body_id': body_id, 'cert_id': cert_id, 'sig_id': sig_id, 'key_id': key_id, 'sec_token_id': sec_token_id, 'sec_token': base64.b64encode(signing.get_cert_binary()).decode('utf8')}) # Create SOAP envelope envelope = etree.XML(envelope_template.substitute(values), parser=parser) # Find soap:Body body = find_node(envelope, 'Body', NS_SOAP_URL) # Fill in Trzby into soap:Body body.append(payload_node) # Calculate digest of soap:Body body_digest = 
calculate_node_digest(body) # Find ds:DigestValue and store the computed digest digest_node = find_node(envelope, 'DigestValue', NS_DS_URL) digest_node.text = base64.b64encode(body_digest) # Find ds:SignedInfo node and get normalized text of it signature_node = find_node(envelope, 'SignedInfo', NS_DS_URL) normalized_signing = get_normalized_subtree(signature_node, ['soap']) # FInd ds:SignatureValue and store there signature of ds:SignedInfo signature_value_node = find_node(envelope, 'SignatureValue', NS_DS_URL) signature_value_node.text = base64.b64encode( signing.sign_text(normalized_signing, 'sha256')) return envelope
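# Minimal usage sketch (assumed, not part of the original module): wrapping an
# already-built payload element in the signed SOAP envelope. The payload XML and the
# `signing` object (an eet Signing instance, per the docstring above) are
# hypothetical placeholders.
# payload = etree.fromstring('<Trzba xmlns="...">...</Trzba>')
# envelope = soap_wsse(payload, signing)
# print(etree.tostring(envelope, pretty_print=True))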
python
# Breaking down configuration File here! import json import os import sys from os import path from .constants import MANAGER_SCOPE, APPLICATION_SCOPE from .exceptions import ImplementorTypeNotFoundException class Settings: def __init__(self): # Loading and Reading from Config file self.conf_path = sys.argv[1] if path.isfile(self.conf_path): with open(self.conf_path) as json_data_file: self.config_data = json.load(json_data_file) else: raise IOError("Configuration file is missing!") self.config_boot = self.config_data["boot"][0] self.config_log = self.config_data["$log"] self.mqtt = self.config_boot.get("only_mqtt", False) self.config_cred = self.config_boot["rest"]["credentials"] self.config_http = self.config_boot["http"] self.config_redis = self.config_boot["redis"]["managers"] self.config_modules = self.config_boot["modules"] self.config_tls = self.config_boot["tls"] self.config_manufacturer = self.config_boot.get("manufacturer", {}) self.config_polling = self.config_boot.get("polling", {}) self.config_refresh = self.config_boot.get("token_refresher", {}) self.config_mqtt = self.config_boot.get("mqtt", {}) self.config_channel_templates = self.config_boot.get("channel_templates", {}) self.config_tcp = self.config_boot.get("tcp_udp_server", {}) self.enable_cors = self.config_boot.get("enable_cors", False) self.config_thread_pool = self.config_boot.get("thread_pool", {}) self.mqtt_channels = self.config_boot.get("mqtt_channels", []) self.client_id = self.config_cred["client_id"] self.client_secret = self.config_cred["client_secret"] # Muzzley API URI self.api_version = self.config_boot["rest"]["version"] # ex. v3 self.api_server = self.config_cred["server"] # ex. https://api.platform.integrations.muzzley.com self.api_server_full = "{}/{}".format(self.api_server, self.api_version) # ex. https://api.platform.integrations.muzzley.com/v3 # Manager Info Public parts = self.config_http["public"].split("://") self.schema_pub = parts[0] # ex. https self.host_pub = parts[1] # ex. fake.integrations.muzzley.com # Localhost parts = self.config_http["bind"].split(":") self.schema_loc = parts[0] # ex. http self.port = int(parts[2]) # ex. 60700 self.host_bind = parts[1].replace("//", "") # ex. localhost self.host_bind_port = "{}:{}".format(self.host_bind, self.port) # ex. localhost:60700 # Muzzley OAuth2.0 self.grant_type = self.config_cred["grant_type"] self.scope = self.config_cred["scope"] # All urls self.auth_url = "{}{}".format(self.api_server_full, "/auth/authorize") self.renew_url = "{}{}".format(self.api_server_full, "/auth/exchange") self.refresh_token_url = "{}/managers/{}/refresh-token".format(self.api_server_full, self.client_id) # Logging file path if "file" in self.config_log and self.config_log["file"] == "{log_path}": parts = self.conf_path.split("/") self.log_path = os.path.splitext(parts[len(parts) - 1])[0] + ".log" elif "file" in self.config_log and self.config_log["file"] != "": self.log_path = self.config_log["file"] else: self.log_path = "/var/log/syslog" # Setting up Redis Database self.redis_bind = self.config_redis["bind"] self.redis_db = self.config_redis["db"] parts = self.redis_bind.split(":") self.redis_host = parts[0] # ex. localhost self.redis_port = parts[1] # ex. 
6379 # Picking out path of module that implements the skeleton self.skeleton_path = self.config_modules["skeleton_implementation"] # Getting TLS related data self.cert_path = self.config_tls["cert"] # Access Property self.access_property = "access" self.access_failed_value = "unreachable" # Identify skeleton/implementor type by scope parts = self.config_cred["scope"].split(' ') if MANAGER_SCOPE in parts: self.implementor_type = 'device' self.webhook_url = "{}{}{}".format(self.api_server_full, "/managers/", self.client_id) self.mqtt_topic = 'managers' elif APPLICATION_SCOPE in parts: self.implementor_type = 'application' self.webhook_url = f"{self.api_server_full}/applications/{self.client_id}" self.mqtt_topic = 'applications' else: raise ImplementorTypeNotFoundException('Error to find the implementor type in credentials, not device or ' 'application implementor!') # Application specific conf self.services = self.config_boot.get('services', []) self.usecases = self.config_boot.get('usecases', []) self.custom_endpoints = self.config_boot.get('custom_endpoints', []) self.channels_grant_access_to_user = self.config_boot.get('channels_grant_access_to_user', []) # The block stores all information obtained my manager through request to platform and # to be made available to multiple modules. self.block = { "access_token": "", "refresh_token": "", "expires": "", "code": "", "http_ep": "", "mqtt_ep": "", } def get_config(self): return self.config_data
python
# Copyright (c) Glow Contributors. See CONTRIBUTORS file. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import, division, print_function, unicode_literals import torch from tests import utils class IndexPutModule(torch.nn.Module): def __init__(self, indices, accumulate=False): super(IndexPutModule, self).__init__() self.indices = indices self.accumulate = accumulate def forward(self, tensor, val): tensor.index_put_(self.indices, val, accumulate=self.accumulate) tensor = tensor + tensor return tensor class TestIndexPut(utils.TorchGlowTestCase): @utils.deterministic_expand( [ lambda: ( "basic", IndexPutModule([torch.tensor([1, 1]), torch.tensor([0, 1])]), torch.zeros(2, 3), torch.tensor([1.0, 2.0]), ), lambda: ( "3d_0", IndexPutModule( [torch.tensor([1, 1]), torch.tensor([0, 1]), torch.tensor([0, 1])] ), torch.zeros(2, 3, 4), torch.tensor([1.0, 2.0]), ), lambda: ( "3d_1", IndexPutModule( [ torch.tensor([1, 1, 0]), torch.tensor([0, 1, 1]), torch.tensor([0, 1, 0]), ] ), torch.zeros(2, 3, 4), torch.tensor([1.0, 2.0, 3.0]), ), lambda: ( "broadcast_value_0", IndexPutModule( [ torch.tensor([2, 0, 1]), torch.tensor([1, 2, 0]), torch.tensor([2, 0, 1]), ] ), torch.zeros(5, 3, 4), torch.tensor([1.0]), ), lambda: ( "broadcast_value_1", IndexPutModule( [ torch.tensor([1, 1, 2]), torch.tensor([0, 1, 2]), torch.tensor([0, 1, 3]), ] ), torch.zeros(5, 3, 4), torch.tensor([1.0]), ), lambda: ( "broadcast_value_2", IndexPutModule( [ torch.tensor([1, 1, 0]), torch.tensor([0, 1, 0]), ] ), torch.zeros(5, 3, 4), torch.tensor([1.0, 1.0, 1.0, 1.0]), ), lambda: ( "accumulate_basic", IndexPutModule([torch.tensor([1, 2]), torch.tensor([0, 1])]), torch.zeros(4, 3), torch.tensor([1.0, 2.0]), ), lambda: ( "accumulate_broadcast", IndexPutModule( [ torch.tensor([1, 1, 2]), torch.tensor([0, 1, 2]), torch.tensor([0, 1, 3]), ], True, ), torch.ones(5, 4, 6), torch.tensor([5.0]), ), lambda: ( "dim_0", IndexPutModule( [ torch.tensor([1]), ] ), torch.zeros(5, 3, 4), torch.tensor([5.0]), ), lambda: ( "dim_1", IndexPutModule( [ torch.tensor([1]), ] ), torch.tensor([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]), torch.tensor([-3.0, -4.0]), ), lambda: ( "dim_2", IndexPutModule( [ torch.tensor([1, 0]), ] ), torch.tensor([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]), torch.tensor([-3.0, -4.0]), ), lambda: ( "dim_3", IndexPutModule( [ torch.tensor([1, 0, 2]), ] ), torch.tensor([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]), torch.tensor([[-3.0], [-4.0], [-5.0]]), ), ] ) def test_index_put(self, _, module, tensor, value): utils.compare_tracing_methods( module, tensor, value, fusible_ops={"aten::index_put_"} )
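# Illustrative sketch (not part of the original test file): what index_put_ does on a
# plain tensor, using only standard PyTorch calls; shapes and values are arbitrary.
import torch

t = torch.zeros(2, 3)
t.index_put_([torch.tensor([1, 1]), torch.tensor([0, 1])], torch.tensor([1.0, 2.0]))
# t is now tensor([[0., 0., 0.],
#                  [1., 2., 0.]])
# With accumulate=True, values written to repeated index positions are summed
# instead of overwriting each other.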
python
#!/usr/bin/python
# -*- coding: utf-8 -*-
from flask_marshmallow import Schema
from marshmallow import fields


class UserSchema(Schema):
    id = fields.String(required=True)
    email = fields.String(required=True)
    name = fields.String()
    bio = fields.String()


user_schema = UserSchema()
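# Minimal usage sketch (assumed, not part of the original module): serializing a plain
# dict through the schema above. The payload is hypothetical, and the exact return
# type of dump() depends on the installed marshmallow version.
example_user = {"id": "42", "email": "jane@example.com", "name": "Jane", "bio": "Hello"}
serialized = user_schema.dump(example_user)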
python
__author__ = 'surya' import xml.etree.cElementTree as ET from datetime import datetime import experimentInfo, participantInfo def makePSIMIXMLFile(NewHitFile,exportPathFile,baitName): #<entrySet/> root = ET.Element("entrySet") root.set("minorVersion","0") root.set("version","0") root.set("level","3") root.set("xsi:schemaLocation", "http://psi.hupo.org/mi/mif300 http://psidev.cvs.sourceforge.net/viewvc/psidev/psi/mi/rel30/src/MIF300.xsd") root.set("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance") root.set("xmlns", "http://psi.hupo.org/mi/mif300") tree = ET.ElementTree(root) index=1 for InteractionLine in NewHitFile: protDic={baitName:"not available",InteractionLine[0]:InteractionLine[2]}#Bait:"notavailbaleName, UniqueName:EntrezName qvalue=InteractionLine[4] # <entrySet><entry/> entry_element = ET.SubElement(root, "entry") # <entrySet><entry><experimentList/> expList_element = ET.SubElement(entry_element, "experimentList") ## check if the experiment is already annotated else do it and assign an id index=experimentInfo.writeExpInfo(expList_element,index) ############################################################################################ # <entrySet><entry><interactionList/> interactnList_element=ET.SubElement(entry_element, "interactionList") # <entrySet><entry><interactionList-interaction interaction_element=ET.SubElement(interactnList_element, "interaction") interaction_element.set("id",str(index)) index+=1 # <entrySet><entry><interactionList-interaction-experimentList int_expL_element = ET.SubElement(interaction_element, "experimentList") # <entrySet><entry><interactionList-interaction-experimentList-experimentRef ET.SubElement(int_expL_element, "experimentRef").text="1" # <entrySet><entry><interactionList-interaction-participantList int_partL = ET.SubElement(interaction_element, "participantList") # <entrySet><entry><interactionList-interaction-participantList-participant for prot in protDic: int_part_element = ET.SubElement(int_partL, "participant") int_part_element.set("id",str(index)) index+=1 # if prot not in ProteinName2IdDic: # ProteinName2IdDic[prot]=index int_part_int = ET.SubElement(int_part_element, "interactor") int_part_int.set("id",str(index)) index += 1 #run method participantInfo.addParticipantInfo(int_part_int,prot,protDic[prot]) # else: # ET.SubElement(int_part_element, "interactorRef").text=str(ProteinName2IdDic[prot]) int_confList= ET.SubElement(interaction_element, "confidenceList") int_confL_conf=ET.SubElement(int_confList, "confidence") conf_unit=ET.SubElement(int_confL_conf, "unit") conf_unit_names=ET.SubElement(conf_unit, "names") ET.SubElement(conf_unit_names, "shortLabel").text="Rank Based p-value" ET.SubElement(conf_unit_names, "fullName").text="MAPPI-DAT based analysis score" ET.SubElement(int_confL_conf, "value").text=str(qvalue) ## write the file tree.write(exportPathFile, encoding='utf-8', xml_declaration=True)
python
import struct, csv, pprint


def calculate_mode_mask(index, ttc_comm, adcs, rw, imu, st, mtr, css, fss, cp):
    mode_value = 0
    mode_value |= (ttc_comm & 0x1) << 0
    mode_value |= (adcs & 0x1) << 1
    mode_value |= (rw & 0x1) << 2
    mode_value |= (imu & 0x1) << 3
    mode_value |= (st & 0x1) << 4
    mode_value |= (mtr & 0x1) << 5
    mode_value |= (css & 0x1) << 6
    mode_value |= (fss & 0x1) << 7
    mode_value |= (cp & 0x1) << 8

    mode_mask = struct.pack("<h", mode_value)
    print(f"Mode Mask Index {index}: Decimal: {mode_value}, Hex: 0x{mode_mask.hex()}")
    return mode_mask.hex()


def read_mode_csv(mode_table_file):
    with open(mode_table_file, newline='') as csvfile:
        data = csv.DictReader(csvfile)
        for row in data:
            # pprint.pprint(row)
            calculate_mode_mask(index=int(row["MODE"]), ttc_comm=int(row["TTC"]),
                                adcs=int(row["ADCS"]), rw=int(row["RW"]),
                                imu=int(row["IMU"]), st=int(row["ST"]),
                                mtr=int(row["MTR"]), css=int(row["CSS"]),
                                fss=int(row["FSS"]), cp=int(row["CP"]))
        # pprint.pprint(data)


# calculate_mode_mask(ttc_comm=1, adcs=1, rw=1, imu=1, st=0, mtr=0, css=1, fss=0, cp=0)
print("Modes for Correct Operation")
read_mode_csv("modes.csv")
print("\n")

print("Modes for Broken Startup")
read_mode_csv("modes_broken.csv")
print("\n")

print("Modes for FSS + Payload")
read_mode_csv("modes_fss_payload.csv")
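# Worked example (illustrative, not part of the original script): with only TTC,
# ADCS and CSS enabled, bits 0, 1 and 6 are set, so mode_value = 1 + 2 + 64 = 67,
# struct.pack("<h", 67) == b'\x43\x00', and the returned hex string is "4300".
assert calculate_mode_mask(index=0, ttc_comm=1, adcs=1, rw=0, imu=0, st=0,
                           mtr=0, css=1, fss=0, cp=0) == "4300"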
python
#!/usr/bin/python2.4
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

'''Unit tests for TxtFile gatherer'''

import os
import sys
if __name__ == '__main__':
  sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), '../..'))

import StringIO
import unittest

from grit.gather import txt


class TxtUnittest(unittest.TestCase):
  def testGather(self):
    input = StringIO.StringIO('Hello there\nHow are you?')
    gatherer = txt.TxtFile.FromFile(input)
    gatherer.Parse()
    self.failUnless(gatherer.GetText() == input.getvalue())
    self.failUnless(len(gatherer.GetCliques()) == 1)
    self.failUnless(gatherer.GetCliques()[0].GetMessage().GetRealContent() ==
                    input.getvalue())


if __name__ == '__main__':
  unittest.main()
python
from django import forms

from .models import Post


class NewPostForm(forms.ModelForm):
    class Meta:
        model = Post
        exclude = ['author','url','likes']
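# Minimal usage sketch (assumed, not part of the original app): binding the form to
# POST data and filling in the excluded 'author' field before saving. The `request`
# object and the surrounding view plumbing are hypothetical.
def save_new_post(request):
    form = NewPostForm(request.POST)
    if form.is_valid():
        post = form.save(commit=False)
        post.author = request.user  # 'author' is excluded from the form, so set it here
        post.save()
        return post
    return None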
python
from plugnparse import entrypoint, ParserFactory
python
# -*- coding: utf8 -*- # ============LICENSE_START==================================================== # org.onap.vvp/validation-scripts # =================================================================== # Copyright © 2017 AT&T Intellectual Property. All rights reserved. # =================================================================== # # Unless otherwise specified, all software contained herein is licensed # under the Apache License, Version 2.0 (the "License"); # you may not use this software except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # # # Unless otherwise specified, all documentation contained herein is licensed # under the Creative Commons License, Attribution 4.0 Intl. (the "License"); # you may not use this documentation except in compliance with the License. # You may obtain a copy of the License at # # https://creativecommons.org/licenses/by/4.0/ # # Unless required by applicable law or agreed to in writing, documentation # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ============LICENSE_END============================================ # # """ environment file structure """ from tests import cached_yaml as yaml from .helpers import validates VERSION = "1.0.0" # pylint: disable=invalid-name @validates("R-03324") def test_environment_file_contains_required_sections(env_file): """ Check that all environments files only have the allowed sections """ required_keys = ["parameters"] with open(env_file) as fh: yml = yaml.load(fh) missing_keys = [v for v in required_keys if v not in yml] assert not missing_keys, "%s missing %s" % (env_file, missing_keys)
python
# -*- coding: utf-8 -*- # Python implementation of the LexRank algorithm. # Reference - LexRank: Graph-based Centrality as Salience in Text Summarization # Reference URL - http://tangra.si.umich.edu/~radev/lexrank/lexrank.pdf # Author - Janu Verma # email - jv367@cornell.edu # http://januverma.wordpress.com/ # @januverma import sys import os import operator import networkx as nx from TextGraphics.src.graph import TextGraph from TextGraphics import Data class LexRank: """ Constructs a summary of the input document by extracting most informative sentences. Arguments: directory - A corpus of text files to be summarized. """ def __init__(self, directory): self.graph = TextGraph(directory) def lexR(self, graph): """ Compute the LexRank of the sentences. LexRank of a sentence in the sentence graph is the PageRank of the node representing the sentence. It is a measure of the importance and influence of the sentence in the corpus. Arguments: graph - A networkx graph or digraph. Returns: A dictionary of all the nodes with their PageRank scores. """ pr = nx.pagerank_numpy(graph, alpha=0.85) return pr def summary(self, compression = 0.25): """ Builds the summary based on the LexRank scores of the sentences. Arguments: compression : A number in [0,1] which is equal to the fraction of total sentences to be included in the summary. Default value is 0.25 Returns: Summary of the input document. """ g = self.graph.sentenceGraph() total_sentences = len(g.nodes()) n_sentences = int(total_sentences * compression) rankings = self.lexR(g) ranked_sentences = sorted(rankings.iteritems(), key=operator.itemgetter(1), reverse=True) summary_sentences = "" i = 0 while (i < n_sentences): u,v = ranked_sentences[i] summary_sentences += u i = i + 1 return summary_sentences
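# Minimal usage sketch (assumed, not part of the original module): summarize a corpus
# directory, keeping roughly a quarter of the sentences. The path is a hypothetical
# placeholder.
# lr = LexRank("corpus/")
# print(lr.summary(compression=0.25))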
python
from .transaction import TxInput, TxOutput, Transaction, InsufficientFunds from .unspent import Unspent
python
import unittest
from app import db
from app.models import User, BlogPost


class BlogPostTest(unittest.TestCase):
    def setUp(self):
        self.user_john = User(username='john', password='johnjohn')
        self.new_blog = BlogPost(content='johnjohnjohn')

    def test_check_instance_variable(self):
        self.assertEquals(self.new_blog.content, 'johnjohnjohn')
python
import pandas as pd from sosia.processing.caching import insert_data, retrieve_authors,\ retrieve_authors_from_sourceyear from sosia.processing.querying import query_pubs_by_sourceyear, stacked_query def get_authors(authors, conn, refresh=False, verbose=False): """Wrapper function to search author data for a list of authors, searching first in the SQL database and then via stacked search. Parameters ---------- authors : list List of Scopus Author IDs to search. conn : sqlite3 connection Standing connection to a SQLite3 database. refresh : bool (optional, default=False) Whether to refresh scopus cached files if they exist, or not. verbose : bool (optional, default=False) Whether to print information on the search progress. Returns ------- data : DataFrame Data on the provided authors. """ from string import Template # Retrieve existing data from SQL cache authors = pd.DataFrame(authors, columns=["auth_id"], dtype="int64") data, missing = retrieve_authors(authors, conn) # Query missing records and insert at the same time if missing: params = {"group": missing, "refresh": refresh, "joiner": ") OR AU-ID(", "q_type": "author", "template": Template("AU-ID($fill)"), "stacked": True, "verbose": verbose} if verbose: print("Pre-filtering...") res = stacked_query(**params) res = pd.DataFrame(res) insert_data(res, conn, table="authors") data, _ = retrieve_authors(authors, conn) return data def get_authors_from_sourceyear(df, conn, refresh=False, stacked=False, verbose=False): """Get authors publishing in specified sourced in specified years. Handles retrieving data, and in case of missing data querying for it and inserting it into the SQL database. Parameters ---------- df : DataFrame DataFrame of source-year-combinations to be searched for. conn : sqlite3 connection Standing connection to an SQLite3 database. refresh : bool (optional, default=False) Whether to refresh cached search files. stacked : bool (optional, default=False) Whether to use fewer queries that are not reusable, or to use modular queries of the form "SOURCE-ID(<SID>) AND PUBYEAR IS <YYYY>". verbose : bool (optional, default=False) Whether to print information on the search progress. Returns ------- data : DataFrame DataFrame in format ("source_id", "year", "auids", "afid"), where entries correspond to an individual paper. """ # Retrieve information in cache data, missing = retrieve_authors_from_sourceyear(df, conn, refresh=refresh) # Download and add missing data to_add = pd.DataFrame() empty = [] for year in missing["year"].unique(): subset = missing[missing["year"] == year] sources = subset["source_id"].unique() new = query_pubs_by_sourceyear(sources, year, refresh=refresh, stacked=stacked, verbose=verbose) no_info = set(sources) - set(new["source_id"].unique()) empty.extend([(s, year) for s in no_info]) to_add = to_add.append(new) # Format useful information data = data.append(to_add) data = data[data["auids"] != ""] data["auids"] = data["auids"].str.replace(";", ",").str.split(",") # Insert new information and information on missing data if empty: sources, years = list(zip(*empty)) d = {"source_id": sources, "year": years, "auids": [""]*len(sources), "afid": [""]*len(sources)} to_add = to_add.append(pd.DataFrame(d)) if not to_add.empty: to_add["auids"] = to_add["auids"].str.replace(";", ",").str.split(",") insert_data(to_add, conn, table="sources_afids") return data
python
from math import cos, sin, radians from random import random import pygame from events_handler import check_win from player import Player pygame.mixer.init() class Ball: RADIUS: int = 17 SPEED: int = 4 click_sound = pygame.mixer.Sound("./assets/click.wav") wall_sound = pygame.mixer.Sound("./assets/ball_wall.wav") def __init__(self, game): super().__init__() self.game = game self.surface: pygame.Surface = game.screen self.radius: int = self.__class__.RADIUS * game.config.get_float("window-size-multipier") self.pos = pygame.math.Vector2(self.surface.get_width() / 2, self.surface.get_height() / 2) self.color = self.game.config.get_color('ball-color', True) self.vector = { "x": 0, "y": 0, "angle": (random() * 360) } def tick(self): delta_y = Ball.SPEED * cos(radians(self.vector["angle"])) delta_x = Ball.SPEED * sin(radians(self.vector["angle"])) self.pos.x += delta_x self.pos.y += delta_y angle = self.vector["angle"] if self.pos.x <= self.game.__class__.MARGIN + Player.WIDTH * self.game.config.get_float("window-size-multipier"): if self.game.player1.y < self.pos.y < self.game.player1.y + Player.HEIGHT * self.game.config.get_float("window-size-multipier"): angle = 360 - angle angle += (random() * 20) - 10 Ball.wall_sound.play() elif self.pos.x >= self.surface.get_width() - self.game.__class__.MARGIN - Player.WIDTH * self.game.config.get_float("window-size-multipier"): if self.game.player2.y < self.pos.y < self.game.player2.y + Player.HEIGHT * self.game.config.get_float("window-size-multipier"): angle = 180 + (180 - angle) angle += (random() * 20) - 10 Ball.wall_sound.play() if self.pos.y <= self.game.__class__.MARGIN: # ball up angle -= 2*(angle - 90) Ball.wall_sound.play() if self.pos.y >= self.surface.get_height() - self.game.__class__.MARGIN: # ball down Ball.wall_sound.play() if angle < 360: # ball down-left angle = 270 - (angle - 270) else: # ball down-right angle = 270 + angle while angle > 360: angle -= 360 self.vector["angle"] = angle check_win(self.game) def draw(self): pygame.draw.circle(self.surface, self.color, self.pos, self.radius) def teleport(self, x: int, y: int): self.pos.x = x self.pos.y = y def reset(self): self.teleport(self.surface.get_width() / 2, self.surface.get_height() / 2) self.vector["angle"] = random() * 360
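# Standalone illustration of the top-wall reflection used in tick() above:
# bouncing off a horizontal wall maps a heading of `angle` degrees to
# angle - 2 * (angle - 90), i.e. 180 - angle, so a ball arriving at 30 degrees
# leaves at 150 degrees.
def _reflect_off_horizontal_wall(angle: float) -> float:
    return angle - 2 * (angle - 90)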
python
# Support code for building a C extension with xxhash files # # Copyright (c) 2016-present, Gregory Szorc (original code for zstd) # 2017-present, Thomas Waldmann (mods to make it more generic, code for blake2) # 2020-present, Gianfranco Costamagna (code for xxhash) # All rights reserved. # # This software may be modified and distributed under the terms # of the BSD license. See the LICENSE file for details. import os # xxhash files, structure as seen in XXHASH (reference implementation) project repository: xxhash_sources = [ 'xxhash.c', ] xxhash_includes = [ '.', ] def xxhash_system_prefix(prefixes): for prefix in prefixes: filename = os.path.join(prefix, 'include', 'xxhash.h') if os.path.exists(filename): with open(filename, 'rb') as fd: if b'XXH64_digest' in fd.read(): return prefix def xxhash_ext_kwargs(bundled_path, system_prefix=None, system=False, **kwargs): """amend kwargs with xxhash stuff for a distutils.extension.Extension initialization. bundled_path: relative (to this file) path to the bundled library source code files system_prefix: where the system-installed library can be found system: True: use the system-installed shared library, False: use the bundled library code kwargs: distutils.extension.Extension kwargs that should be amended returns: amended kwargs """ def multi_join(paths, *path_segments): """apply os.path.join on a list of paths""" return [os.path.join(*(path_segments + (path, ))) for path in paths] use_system = system and system_prefix is not None sources = kwargs.get('sources', []) if not use_system: sources += multi_join(xxhash_sources, bundled_path) include_dirs = kwargs.get('include_dirs', []) if use_system: include_dirs += multi_join(['include'], system_prefix) else: include_dirs += multi_join(xxhash_includes, bundled_path) library_dirs = kwargs.get('library_dirs', []) if use_system: library_dirs += multi_join(['lib'], system_prefix) libraries = kwargs.get('libraries', []) if use_system: libraries += ['xxhash', ] extra_compile_args = kwargs.get('extra_compile_args', []) if not use_system: extra_compile_args += [] # not used yet ret = dict(**kwargs) ret.update(dict(sources=sources, extra_compile_args=extra_compile_args, include_dirs=include_dirs, library_dirs=library_dirs, libraries=libraries)) return ret
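# Hedged usage sketch: how these helpers might feed a setuptools Extension; the
# module name, bundled path, and prefix list below are assumptions for illustration.
# from setuptools import Extension
# prefix = xxhash_system_prefix(['/usr/local', '/usr'])
# ext = Extension('_xxhash_binding',
#                 **xxhash_ext_kwargs('src/xxhash', system_prefix=prefix,
#                                     system=prefix is not None,
#                                     sources=['src/_xxhash_binding.c']))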
python
import os from collections import defaultdict import json import logging import dateutil from django.contrib import messages from django.db import transaction from django.db.models import Count, Sum, Q from django.http import HttpResponse from django.shortcuts import redirect from django.urls import reverse from django.utils.timezone import now from django.utils.translation import ugettext_lazy as _ from django.views.generic import DetailView, ListView from pretix.base.models import Order, OrderPayment from pretix.control.permissions import EventPermissionRequiredMixin, OrganizerPermissionRequiredMixin from pretix_sepadebit.models import SepaExport, SepaExportOrder from sepaxml import SepaDD, validation from pretix.control.views.organizer import OrganizerDetailViewMixin logger = logging.getLogger(__name__) class ExportListView(ListView): template_name = 'pretix_sepadebit/export.html' model = SepaExport context_object_name = 'exports' def get_unexported(self): raise NotImplementedError() def get_context_data(self, **kwargs): ctx = super().get_context_data() ctx['num_new'] = self.get_unexported().count() ctx['basetpl'] = "pretixcontrol/event/base.html" if not hasattr(self.request, 'event'): ctx['basetpl'] = "pretixcontrol/organizers/base.html" return ctx def _config_for_event(self, event): if event not in self._event_cache: self._event_cache[event] = ( ("name", event.settings.payment_sepadebit_creditor_name), ("IBAN", event.settings.payment_sepadebit_creditor_iban), ("BIC", event.settings.payment_sepadebit_creditor_bic), ("batch", True), ("creditor_id", event.settings.payment_sepadebit_creditor_id), ("currency", event.currency) ) return self._event_cache[event] def post(self, request, *args, **kwargs): self._event_cache = {} valid_payments = defaultdict(list) files = {} for payment in self.get_unexported().select_related('order', 'order__event'): if not payment.info_data: # Should not happen # TODO: Notify user payment.state = OrderPayment.PAYMENT_STATE_FAILED payment.save() payment.order.status = Order.STATUS_PENDING payment.order.save() continue payment_dict = { "name": payment.info_data['account'], "IBAN": payment.info_data['iban'], "BIC": payment.info_data['bic'], "amount": int(payment.amount * 100), "type": "OOFF", "collection_date": max(now().date(), dateutil.parser.parse(payment.info_data['date']).date()), "mandate_id": payment.info_data['reference'], "mandate_date": (payment.order.datetime if payment.migrated else payment.created).date(), "description": _('Event ticket {event}-{code}').format( event=payment.order.event.slug.upper(), code=payment.order.code ) } config = self._config_for_event(payment.order.event) if config not in files: files[config] = SepaDD(dict(config), schema='pain.008.003.02') file = files[config] file.add_payment(payment_dict) valid_payments[file].append(payment) if valid_payments: with transaction.atomic(): for k, f in list(files.items()): if hasattr(request, 'event'): exp = SepaExport(event=request.event, xmldata='') exp.testmode = request.event.testmode else: exp = SepaExport(organizer=request.organizer, xmldata='') exp.testmode = False exp.xmldata = f.export(validate=False).decode('utf-8') import xmlschema # xmlschema does some weird monkeypatching in etree, if we import it globally, things fail my_schema = xmlschema.XMLSchema( os.path.join(os.path.dirname(validation.__file__), 'schemas', f.schema + '.xsd') ) errs = [] for e in my_schema.iter_errors(exp.xmldata): errs.append(str(e)) if errs: messages.error(request, _('The generated file did not validate for 
the following reasons. ' 'Please contact pretix support for more information.\n{}').format( "\n".join(errs))) del files[k] else: exp.currency = f._config['currency'] exp.save() SepaExportOrder.objects.bulk_create([ SepaExportOrder(order=p.order, payment=p, export=exp, amount=p.amount) for p in valid_payments[f] ]) if len(files) > 1: messages.warning(request, _('Multiple new export files have been created, since your events ' 'have differing SEPA settings. Please make sure to process all of them!')) elif len(files) > 0: messages.success(request, _('A new export file has been created.')) else: messages.warning(request, _('No valid orders have been found.')) if hasattr(request, 'event'): return redirect(reverse('plugins:pretix_sepadebit:export', kwargs={ 'event': request.event.slug, 'organizer': request.organizer.slug, })) else: return redirect(reverse('plugins:pretix_sepadebit:export', kwargs={ 'organizer': request.organizer.slug, })) class DownloadView(DetailView): model = SepaExport def get(self, request, *args, **kwargs): self.object = self.get_object() resp = HttpResponse(self.object.xmldata, content_type='application/xml') resp['Content-Disposition'] = 'attachment; filename="{}-{}.xml"'.format( self.request.event.slug.upper() if hasattr(self.request, 'event') else self.request.organizer.slug.upper(), self.object.datetime.strftime('%Y-%m-%d-%H-%M-%S'), ) return resp class OrdersView(DetailView): model = SepaExport context_object_name = 'export' template_name = 'pretix_sepadebit/orders.html' def get_context_data(self, **kwargs): ctx = super().get_context_data(**kwargs) ctx['seorders'] = self.object.sepaexportorder_set.select_related('order', 'payment').prefetch_related( 'order__invoices', 'order__event') ctx['total'] = self.object.sepaexportorder_set.aggregate(sum=Sum('amount'))['sum'] ctx['basetpl'] = "pretixcontrol/event/base.html" if not hasattr(self.request, 'event'): ctx['basetpl'] = "pretixcontrol/organizers/base.html" return ctx class EventExportListView(EventPermissionRequiredMixin, ExportListView): permission = 'can_change_orders' def get_queryset(self): return SepaExport.objects.filter( event=self.request.event ).annotate( cnt=Count('sepaexportorder'), sum=Sum('sepaexportorder__amount'), ).order_by('-datetime') def get_unexported(self): return OrderPayment.objects.filter( order__event=self.request.event, provider='sepadebit', state=OrderPayment.PAYMENT_STATE_CONFIRMED, order__testmode=self.request.event.testmode, sepaexportorder__isnull=True ) class EventDownloadView(EventPermissionRequiredMixin, DownloadView): permission = 'can_change_orders' def get_object(self, *args, **kwargs): return SepaExport.objects.get( event=self.request.event, pk=self.kwargs.get('id') ) class EventOrdersView(EventPermissionRequiredMixin, OrdersView): permission = 'can_change_orders' def get_object(self, *args, **kwargs): return SepaExport.objects.get( event=self.request.event, pk=self.kwargs.get('id') ) class OrganizerDownloadView(OrganizerPermissionRequiredMixin, OrganizerDetailViewMixin, DownloadView): permission = 'can_change_organizer_settings' def get_object(self, *args, **kwargs): return SepaExport.objects.get( organizer=self.request.organizer, pk=self.kwargs.get('id') ) class OrganizerOrdersView(OrganizerPermissionRequiredMixin, OrganizerDetailViewMixin, OrdersView): permission = 'can_change_organizer_settings' def get_object(self, *args, **kwargs): return SepaExport.objects.get( organizer=self.request.organizer, pk=self.kwargs.get('id') ) class 
OrganizerExportListView(OrganizerPermissionRequiredMixin, OrganizerDetailViewMixin, ExportListView): permission = 'can_change_organizer_settings' def get_queryset(self): return SepaExport.objects.filter( Q(organizer=self.request.organizer) | Q(event__organizer=self.request.organizer) ).annotate( cnt=Count('sepaexportorder'), sum=Sum('sepaexportorder__amount'), ).order_by('-datetime') def get_unexported(self): return OrderPayment.objects.filter( order__event__organizer=self.request.organizer, provider='sepadebit', state=OrderPayment.PAYMENT_STATE_CONFIRMED, order__testmode=False, sepaexportorder__isnull=True )
python
''' module for importing all functions ''' from pyalgo import * ''' PyAlgo - Maths ''' from pyalgo.maths import * from pyalgo.maths.catalan_numbers import catalan from pyalgo.maths.factorial import factorial from pyalgo.maths.fibonnaci_numbers import fibonacci from pyalgo.maths.gcd import gcd, lcm from pyalgo.maths.power import mod_power, big_power from pyalgo.maths.prime import prime from pyalgo.maths.sieve import sieve from pyalgo.maths.totient_function import totient from pyalgo.maths.gray_code import gray_code from pyalgo.maths.convex_hull import convex_hull ''' PyAlgo - Searching ''' from pyalgo.search import * from pyalgo.search.binary_search import binary_search from pyalgo.search.exponential_search import exponential_search from pyalgo.search.interpolation_search import interpolation_search from pyalgo.search.jump_search import jump_search from pyalgo.search.ternary_search import ternary_search from pyalgo.search.kmp_algorithm import kmp from pyalgo.search.rabin_karp import rabin_karp from pyalgo.search.z_algorithm import z_algorithm ''' PyAlgo - CP ''' from pyalgo.cp import * from pyalgo.cp.FastIO import * from pyalgo.cp.template import * ''' PyAlgo - Sort ''' from pyalgo.sort import * from pyalgo.sort.bubble_sort import bubble_sort from pyalgo.sort.heap_sort import heap_sort from pyalgo.sort.insertion_sort import insertion_sort from pyalgo.sort.merge_sort import merge_sort from pyalgo.sort.quick_sort import quick_sort, quick_sort_recursive from pyalgo.sort.selection_sort import selection_sort from pyalgo.sort.counting_sort import counting_sort from pyalgo.sort.radix_sort import radix_sort from pyalgo.sort.bucket_sort import bucket_sort from pyalgo.sort.shell_sort import shell_sort from pyalgo.sort.pigeonhole_sort import pigeonhole_sort from pyalgo.sort.cycle_sort import cycle_sort ''' PyAlgo - Graph ''' from pyalgo.graph import * from pyalgo.graph.mst import * from pyalgo.graph.bfs import bfs, bfs_paths from pyalgo.graph.dfs import dfs, dfs_paths from pyalgo.graph.mst.prim_mst import * ''' PyAlgo Devansh Singh, 2021 '''
python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Created on Jul 2021.

@author: Wanderson Neto
"""

import os

from convert import convert


def inicio():
    # Start-up banner plus a welcome message
    # ("Welcome to the program for converting pdf files to txt").
    print('###################')
    print('   ##############')
    print('      ##########')
    print('         #####')
    print('           #')
    print('         #####')
    print('      ##########')
    print('   ##############')
    print('###################')
    print('Bem-vindo ao programa para converter arquivos pdf para txt')


# "Enter the path of the .pdf file to convert"
file = input('Entre com o caminho do arquivo .pdf para a conversão')
# The original script then overrode the typed path with this hard-coded one,
# which looks like leftover debugging; remove the next line to use your own input.
file = '/media/dgbe/HD/appPdfTotxt/pdftotxt/entrada/MODELO DE PROJETO ACADEPOL.pdf'
head, tail = os.path.split(file)
text = convert.pdf(file)
with open('result/' + tail + '.txt', 'w') as f:
    f.write(text)
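# os.path.split() behaviour relied on above (stdlib, no assumptions):
# os.path.split('/some/dir/report.pdf') == ('/some/dir', 'report.pdf'),
# so the converted text lands in 'result/report.pdf.txt'.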
python
import sys

fileName = "C:\\Users\\suagrawa\\Optimization-Python\\Regression\\input"
data = []


def readFromFile(fileName):
    with open(fileName) as f:
        content = f.readlines()
    content = [x.strip() for x in content]
    for item in content:
        row = [int(el) for el in item.split(',')]
        data.append(row)
    return data


def sign(row, weights):
    firstValue = row[0] * weights[0]
    secondValue = row[1] * weights[1]
    # renamed from `sum` to avoid shadowing the builtin
    total = weights[2] + firstValue + secondValue
    return 1 if total >= 0 else -1


def perceptronAlgorithm(data):
    weights = [0 for i in range(len(data[0]))]
    result = ""
    while True:
        isFinal = True
        for i in range(0, len(data)):
            expected = data[i][2]
            predicted = sign(data[i], weights)
            if expected * predicted <= 0:
                isFinal = False
                weights[0] = weights[0] + expected * data[i][0]
                weights[1] = weights[1] + expected * data[i][1]
                weights[2] = weights[2] + expected
        if isFinal:
            result += str(weights[0]) + ", " + str(weights[1]) + ", " + str(weights[2])
            break
        else:
            result += str(weights[0]) + ", " + str(weights[1]) + ", " + str(weights[2]) + "\n"
    # The original omitted this return, so the caller below printed None.
    return result


def writeToFile(result):
    outputFileName = sys.argv[2]
    f = open(outputFileName, 'w')
    f.write(result)
    f.close()


data = readFromFile(fileName)
print(data)
result = perceptronAlgorithm(data)
print(result)
#writeToFile(result)
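# Hedged sketch with a tiny, linearly separable dataset (values invented for
# illustration) in place of the hard-coded Windows input path above:
# toy = [[1, 2, 1], [2, 3, 1], [-1, -2, -1], [-2, -1, -1]]
# print(perceptronAlgorithm(toy))  # prints one "w0, w1, bias" line per pass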
python
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sat Mar 2 18:56:24 2019 @author: descentis """ import os from multiprocessing import Process, Lock import time import numpy as np import glob import difflib import xml.etree.ElementTree as ET import math import textwrap import html import requests import io class wikiConverter(object): instance_id = 1 def indent(self,elem, level=0): i = "\n" + level*" " if len(elem): if not elem.text or not elem.text.strip(): elem.text = i + " " if not elem.tail or not elem.tail.strip(): elem.tail = i for elem in elem: self.indent(elem, level+1) if not elem.tail or not elem.tail.strip(): elem.tail = i else: if level and (not elem.tail or not elem.tail.strip()): elem.tail = i @staticmethod def wiki_file_writer(elem,myFile,prefix): global instance_id t = '\t' Instance = t+t+"<Instance " for ch_elem in elem: if(('id' in ch_elem.tag) and ('parentid' not in ch_elem.tag)): Instance = Instance+ "Id="+'"'+str(wikiConverter.instance_id)+'"'+" InstanceType="+'"'+"Revision/Wiki"+'"'+" RevisionId="+ '"'+str(ch_elem.text)+'"'+">\n" myFile.write(Instance) ''' RevisionId = t+t+t+"<RevisionId>"+ch_elem.text+"</RevisionId>\n" myFile.write(RevisionId) ''' ''' if(ch_elem.tag==prefix+'parentid'): ParentId = t+t+t+"<ParentId>"+ch_elem.text+"</ParentId>\n" myFile.write(ParentId) ''' ''' Timestamp Information ''' if('timestamp' in ch_elem.tag): ''' if(f_p!=1): Instance = Instance+" InstanceType= "+'"'+"wiki/text"+'"'+">\n" myFile.write(Instance) ''' Timestamp = t+t+t+"<TimeStamp>\n" myFile.write(Timestamp) CreationDate = t+t+t+t+"<CreationDate>"+ch_elem.text[:-1]+'.0'+"</CreationDate>\n" myFile.write(CreationDate) Timestamp = t+t+t+"</TimeStamp>\n" myFile.write(Timestamp) ''' Contributors information ''' if('contributor' in ch_elem.tag): Contributors = t+t+t+"<Contributors>\n" myFile.write(Contributors) for contrib in ch_elem: if('ip' in contrib.tag): LastEditorUserName = t+t+t+t+"<OwnerUserName>"+html.escape(contrib.text)+"</OwnerUserName>\n" myFile.write(LastEditorUserName) else: if('username' in contrib.tag): try: LastEditorUserName = t+t+t+t+"<OwnerUserName>"+html.escape(contrib.text)+"</OwnerUserName>\n" except: LastEditorUserName = t+t+t+t+"<OwnerUserName>None</OwnerUserName>\n" myFile.write(LastEditorUserName) if(('id' in contrib.tag) and ('parentid' not in contrib.tag)): LastEditorUserId = t+t+t+t+"<OwnerUserId>"+contrib.text+"</OwnerUserId>\n" myFile.write(LastEditorUserId) Contributors = t+t+t+"</Contributors>\n" myFile.write(Contributors) ''' Body/Text Information ''' if('text' in ch_elem.tag): Body = t+t+t+"<Body>\n" myFile.write(Body) if(ch_elem.attrib.get('bytes')!=None): text_field = t+t+t+t+"<Text Type="+'"'+"wiki/text"+'"'+" Bytes="+'"'+ch_elem.attrib['bytes']+'">\n' elif(ch_elem.text != None): text_field = t+t+t+t+"<Text Type="+'"'+"wiki/text"+'"'+" Bytes="+'"'+str(len(ch_elem.text))+'">\n' else: text_field = t+t+t+t+"<Text Type="+'"'+"wiki/text"+'"'+" Bytes="+'"'+str(0)+'">\n' myFile.write(text_field) if(ch_elem.text == None): text_body = ""; else: text_body = textwrap.indent(text=ch_elem.text, prefix=t+t+t+t+t) text_body = html.escape(text_body) Body_text = text_body+"\n" myFile.write(Body_text) text_field = t+t+t+t+"</Text>\n" myFile.write(text_field) Body = t+t+t+"</Body>\n" myFile.write(Body) if('comment' in ch_elem.tag): Edit = t+t+t+"<EditDetails>\n" myFile.write(Edit) if(ch_elem.text == None): text_body = ""; else: text_body = textwrap.indent(text=ch_elem.text, prefix=t+t+t+t+t) text_body = html.escape(text_body) EditType = 
t+t+t+t+"<EditType>\n"+text_body+"\n"+t+t+t+t+"</EditType>\n" #Body_text = text_body+"\n" myFile.write(EditType) Edit = t+t+t+"</EditDetails>\n" myFile.write(Edit) if('sha1' in ch_elem.tag): sha = ch_elem.text if(type(sha)!=type(None)): shaText = t+t+t+'<Knowl key="sha">'+sha+'</Knowl>\n' myFile.write(shaText) else: shaText = '' Instance = t+t+"</Instance>\n" myFile.write(Instance) wikiConverter.instance_id+=1 @staticmethod def wiki_knolml_converter(name, *args, **kwargs): #global instance_id #Creating a meta file for the wiki article # To get an iterable for wiki file file_name = name context_wiki = ET.iterparse(file_name, events=("start","end")) # Turning it into an iterator context_wiki = iter(context_wiki) # getting the root element event_wiki, root_wiki = next(context_wiki) file_name = name[:-4]+'.knolml' file_path = file_name if kwargs.get('output_dir')!=None: file_path = file_path.replace('output','wikipedia_articles') if not os.path.exists(file_path): with open(file_path,"w",encoding='utf-8') as myFile: myFile.write("<?xml version='1.0' encoding='utf-8'?>\n") myFile.write("<KnolML>\n") myFile.write('<Def attr.name="sha" attrib.type="string" for="Instance" id="sha"/>\n') prefix = '{http://www.mediawiki.org/xml/export-0.10/}' #In case of Wikipedia, prefic is required f = 0 title_text = '' try: for event, elem in context_wiki: if event == "end" and 'id' in elem.tag: if(f==0): with open(file_path,"a",encoding='utf-8') as myFile: myFile.write("\t<KnowledgeData "+"Type="+'"'+"Wiki/text/revision"+'"'+" Id="+'"'+elem.text+'"'+">\n") f=1 if event == "end" and 'title' in elem.tag: title_text = elem.text if(f==1 and title_text!=None): Title = "\t\t<Title>"+title_text+"</Title>\n" with open(file_path,"a",encoding='utf-8') as myFile: myFile.write(Title) title_text = None if event == "end" and 'revision' in elem.tag: with open(file_path,"a",encoding='utf-8') as myFile: wikiConverter.wiki_file_writer(elem,myFile,prefix) elem.clear() root_wiki.clear() except: print("found problem with the data: "+ file_name) with open(file_path,"a",encoding='utf-8') as myFile: myFile.write("\t</KnowledgeData>\n") myFile.write("</KnolML>\n") wikiConverter.instance_id = 1 @staticmethod def is_number(s): try: int(s) return True except ValueError: return False @staticmethod def encode(str1, str2): output = "" s = [x.replace("\n", "`").replace("-", "^") for x in str1.split(" ")] s2 = [x.replace("\n", "`").replace("-", "^") for x in str2.split(" ")] i = 0 while(True): if i == len(s): break; if s[i].isspace() or s[i] == '': del s[i] else: i += 1 i = 0 while(True): if i == len(s2): break; if s2[i].isspace() or s2[i] == '': del s2[i] else: i += 1 d = difflib.Differ() result = list(d.compare(s, s2)) pos = 0 neg = 0 for x in result: if x[0] == " ": pos += 1 if neg != 0: output += "-"+str(neg)+" " neg = 0 elif x[0] == "-": neg += 1 if pos != 0: output += str(pos)+" " pos = 0 elif x[0] != "?": if pos != 0: output += str(pos)+" " pos = 0 if neg != 0: output += "-"+str(neg)+" " neg = 0 if wikiConverter.is_number(x[2:]): output += "'"+x[2:]+"' " else: output += x[2:]+" " if pos != 0: output += str(pos)+" " if neg != 0: output += "-"+str(neg)+" " return output.replace("\t\t\t", "") #Main function @staticmethod def compress(file_name, directory): # file_name = input("Enter path of KML file:") tree = ET.parse(file_name) r = tree.getroot() for child in r: if('KnowledgeData' in child.tag): child.attrib['Type'] = 'Wiki/text/revision/compressed' root = child last_rev = "" length = len(root.findall('Instance')) print(length, "revisions 
found") count = 0 intervalLength = int((math.log(length)) ** 2); # Keep the Orginal text after every 'm' revisions m = intervalLength+1 for each in root.iter('Text'): count += 1 if m != intervalLength+1: current_str = each.text each.text = wikiConverter.encode(prev_str, current_str) prev_str = current_str # print("Revision ", count, " written") m = m - 1 if m == 0: m = intervalLength+1 else: prev_str = each.text # print("Revision ", count, " written") m = m - 1 continue print("KnolML file created") # Creating directory if not os.path.exists(directory): os.mkdir(directory) # Changing file path to include directory file_name = file_name.split('/') file_name = directory+'/'+file_name[-1] ''' file_name.insert(-1, directory) separator = '/' file_name = separator.join(file_name) ''' tree.write(file_name[:-7]+'.knolml') f = open(file_name[:-7]+'.knolml') f_str = f.read() f.close() f2 = open(file_name[:-7]+'.knolml', "w") f2.write("<?xml version='1.0' encoding='utf-8'?>\n"+f_str) f2.close() @staticmethod def wikiConvert(*args, **kwargs): if(kwargs.get('output_dir')!=None): output_dir = kwargs['output_dir'] if(kwargs.get('file_name')!=None): file_name = kwargs['file_name'] wikiConverter.wiki_knolml_converter(file_name) file_name = file_name[:-4] + '.knolml' wikiConverter.compress(file_name,output_dir) os.remove(file_name) if(kwargs.get('file_list')!=None): path_list = kwargs['file_list'] for file_name in path_list: wikiConverter.wiki_knolml_converter(file_name) file_name = file_name[:-4] + '.knolml' wikiConverter.compress(file_name,output_dir) os.remove(file_name) if((kwargs.get('file_name')==None) and (kwargs.get('file_list')==None)): print("No arguments provided") def returnList(self, l, n): for i in range(0,len(l),n): yield l[i:i+n] @staticmethod def __file_lists(fileNum,c_num,fileNames): fileList = [] if(fileNum<c_num): for f in fileNames: fileList.append([f]) else: f = np.array_split(fileNames,c_num) for i in f: fileList.append(i.tolist()) return fileList @staticmethod def compressAll(dir_path, *args, **kwargs): t1 = time.time() if(kwargs.get('c_num')!=None): c_num = kwargs['c_num'] else: c_num = 4 # By default it is 4 fileNames = glob.glob(dir_path+'/*.xml') if(kwargs.get('output_dir')!=None): output_dir=kwargs['output_dir'] else: output_dir = os.getcwd() fileNum = len(fileNames) fileList = wikiConverter.__file_lists(fileNum, c_num, fileNames) l = Lock() processDict = {} if(fileNum<c_num): pNum = fileNum else: pNum = c_num for i in range(pNum): processDict[i+1] = Process(target=wikiConverter.wikiConvert,kwargs={'output_dir':output_dir,'file_list': fileList[i],'l': l}) for i in range(pNum): processDict[i+1].start() for i in range(pNum): processDict[i+1].join() t2 = time.time() print("All process done with time: ",str(t2-t1)) @staticmethod def convertwiki(*args, **kwargs): if(kwargs.get('output_dir')!=None): output_dir = kwargs['output_dir'] if(kwargs.get('file_name')!=None): file_name = kwargs['file_name'] wikiConverter.wiki_knolml_converter(file_name,output_dir=output_dir) file_name = file_name[:-4] + '.knolml' #wikiConverter.compress(file_name,output_dir) #os.remove(file_name) if(kwargs.get('file_list')!=None): path_list = kwargs['file_list'] for file_name in path_list: wikiConverter.wiki_knolml_converter(file_name, output_dir=output_dir) file_name = file_name[:-4] + '.knolml' #wikiConverter.compress(file_name,output_dir) #os.remove(file_name) if((kwargs.get('file_name')==None) and (kwargs.get('file_list')==None)): print("No arguments provided") @staticmethod def convertall(dir_path, *args, 
**kwargs): t1 = time.time() if(kwargs.get('c_num')!=None): c_num = kwargs['c_num'] else: c_num = 4 # By default it is 4 fileNames = glob.glob(dir_path+'/*.xml') if(kwargs.get('output_dir')!=None): output_dir=kwargs['output_dir'] if not os.path.isdir(output_dir): os.makedirs(output_dir) else: output_dir = os.getcwd() fileNum = len(fileNames) fileList = wikiConverter.__file_lists(fileNum, c_num, fileNames) l = Lock() processDict = {} if(fileNum<c_num): pNum = fileNum else: pNum = c_num for i in range(pNum): processDict[i+1] = Process(target=wikiConverter.convertwiki,kwargs={'output_dir':output_dir,'file_list': fileList[i],'l': l}) for i in range(pNum): processDict[i+1].start() for i in range(pNum): processDict[i+1].join() t2 = time.time() print("All process done with time: ",str(t2-t1)) @staticmethod def getArticle(*args, **kwargs): # articleName = raw_input() # articleName = articleName.replace(' ', '_') featuredArticleList = [] if(kwargs.get('file_name')!=None): featuredArticleList.append(kwargs['file_name']) if(kwargs.get('file_list')!=None): featuredArticleList = kwargs['file_list'] if(kwargs.get('output_dir')!=None): output_dir = kwargs['output_dir']+'/' if not os.path.exists(output_dir): os.makedirs(output_dir) else: output_dir = '' for each in featuredArticleList: articleName = each articleName = articleName.replace(' ','_') articleName = articleName.replace('/','__') file_handler = io.open(output_dir+articleName+'.xml', mode='w+', encoding='utf-8') url = 'https://en.m.wikipedia.org/w/index.php?title=Special:Export&pages=' + articleName + '&history=1&action=submit' headers = { 'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Mobile Safari/537.36' } print('Downloading ' + articleName + '...') r = requests.get(url, headers=headers) if r.status_code == 200: xml = r.text file_handler.write(xml) print(articleName,'Completed!') else: print('Something went wrong! ' + articleName + '\n' + '\n') file_handler.close() wikiConverter.wiki_knolml_converter(output_dir+articleName+'.xml') @staticmethod def serialCompress(self,dir_path, *args, **kwargs): t1 = time.time() file_list = os.listdir(dir_path) path_list = [] if(kwargs.get('output_dir')!=None): output_dir=kwargs['output_dir'] else: output_dir = os.getcwd() for f in file_list: path_list.append(dir_path+'/'+f) self.convert(path_list,output_dir=output_dir) t2 = time.time() print("all process done: ",str(t2-t1))
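# Hedged usage sketch based on the static methods above; the article title and
# directory names are illustrative:
# wikiConverter.getArticle(file_name='Pendulum', output_dir='articles')
# wikiConverter.wikiConvert(file_name='articles/Pendulum.xml', output_dir='compressed')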
python
# Copyright 2011 OpenStack Foundation # Copyright 2013 Rackspace Hosting # Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from unittest import mock import fixtures from keystoneauth1 import adapter import logging import requests import testtools from troveclient.apiclient import client from troveclient import client as other_client from troveclient import exceptions from troveclient import service_catalog import troveclient.v1.client class ClientTest(testtools.TestCase): def test_get_client_class_v1(self): version_map = other_client.get_version_map() output = client.BaseClient.get_class('database', '1.0', version_map) self.assertEqual(troveclient.v1.client.Client, output) def test_get_client_class_unknown(self): version_map = other_client.get_version_map() self.assertRaises(exceptions.UnsupportedVersion, client.BaseClient.get_class, 'database', '0', version_map) def test_client_with_auth_system_without_auth_plugin(self): self.assertRaisesRegex( exceptions.AuthSystemNotFound, "AuthSystemNotFound: 'something'", other_client.HTTPClient, user='user', password='password', projectid='project', timeout=2, auth_url="http://www.blah.com", auth_system='something') def test_client_with_auth_system_without_endpoint(self): auth_plugin = mock.Mock() auth_plugin.get_auth_url = mock.Mock(return_value=None) self.assertRaises( exceptions.EndpointNotFound, other_client.HTTPClient, user='user', password='password', projectid='project', timeout=2, auth_plugin=auth_plugin, auth_url=None, auth_system='something') def test_client_with_timeout(self): instance = other_client.HTTPClient(user='user', password='password', projectid='project', timeout=2, auth_url="http://www.blah.com", insecure=True) self.assertEqual(2, instance.timeout) mock_request = mock.Mock() mock_request.return_value = requests.Response() mock_request.return_value.status_code = 200 mock_request.return_value.headers = { 'x-server-management-url': 'blah.com', 'x-auth-token': 'blah', } with mock.patch('requests.request', mock_request): instance.authenticate() requests.request.assert_called_with( mock.ANY, mock.ANY, timeout=2, headers=mock.ANY, verify=mock.ANY) def test_client_unauthorized(self): instance = other_client.HTTPClient(user='user', password='password', projectid='project', timeout=2, auth_url="http://www.blah.com", cacert=mock.Mock()) instance.auth_token = 'foobar' instance.management_url = 'http://example.com' instance.get_service_url = mock.Mock(return_value='http://example.com') instance.version = 'v2.0' mock_request = mock.Mock() mock_request.side_effect = other_client.exceptions.Unauthorized(401) with mock.patch('requests.request', mock_request): self.assertRaises( exceptions.Unauthorized, instance.get, '/instances') def test_client_bad_request(self): instance = other_client.HTTPClient(user='user', password='password', projectid='project', timeout=2, auth_url="http://www.blah.com") instance.auth_token = 'foobar' instance.management_url = 'http://example.com' 
instance.get_service_url = mock.Mock(return_value='http://example.com') instance.version = 'v2.0' mock_request = mock.Mock() mock_request.side_effect = other_client.exceptions.BadRequest() with mock.patch('requests.request', mock_request): self.assertRaises( exceptions.BadRequest, instance.get, '/instances') def test_client_with_client_exception(self): instance = other_client.HTTPClient(user='user', password='password', projectid='project', timeout=2, auth_url="http://www.blah.com", retries=2) instance.auth_token = 'foobar' instance.management_url = 'http://example.com' instance.get_service_url = mock.Mock(return_value='http://example.com') instance.version = 'v2.0' mock_request = mock.Mock() mock_request.side_effect = other_client.exceptions.ClientException() type(mock_request.side_effect).code = mock.PropertyMock( side_effect=[501, 111]) with mock.patch('requests.request', mock_request): self.assertRaises( exceptions.ClientException, instance.get, '/instances') def test_client_connection_error(self): instance = other_client.HTTPClient(user='user', password='password', projectid='project', timeout=2, auth_url="http://www.blah.com", retries=2) instance.auth_token = 'foobar' instance.management_url = 'http://example.com' instance.get_service_url = mock.Mock(return_value='http://example.com') instance.version = 'v2.0' mock_request = mock.Mock() mock_request.side_effect = requests.exceptions.ConnectionError( 'connection refused') with mock.patch('requests.request', mock_request): self.assertRaisesRegex( exceptions.ClientException, 'Unable to establish connection: connection refused', instance.get, '/instances') @mock.patch.object(other_client.HTTPClient, 'request', return_value=(200, "{'versions':[]}")) def _check_version_url(self, management_url, version_url, mock_request): projectid = '25e469aa1848471b875e68cde6531bc5' instance = other_client.HTTPClient(user='user', password='password', projectid=projectid, auth_url="http://www.blah.com") instance.auth_token = 'foobar' instance.management_url = management_url % projectid mock_get_service_url = mock.Mock(return_value=instance.management_url) instance.get_service_url = mock_get_service_url instance.version = 'v2.0' # If passing None as the part of url, a client accesses the url which # doesn't include "v2/<projectid>" for getting API version info. instance.get('') mock_request.assert_called_once_with(instance.management_url, 'GET', headers=mock.ANY) mock_request.reset_mock() # Otherwise, a client accesses the url which includes "v2/<projectid>". 
instance.get('/instances') url = instance.management_url + '/instances' mock_request.assert_called_once_with(url, 'GET', headers=mock.ANY) def test_client_version_url(self): self._check_version_url('http://foo.com/v1/%s', 'http://foo.com/') def test_client_version_url_with_tenant_name(self): self._check_version_url('http://foo.com/trove/v1/%s', 'http://foo.com/trove/') def test_log_req(self): logger = self.useFixture( fixtures.FakeLogger( name='troveclient.client', format="%(message)s", level=logging.DEBUG, nuke_handlers=True ) ) cs = other_client.HTTPClient(user='user', password='password', projectid=None, auth_url="http://www.blah.com", http_log_debug=True) cs.http_log_req(('/foo', 'GET'), {'headers': {}}) cs.http_log_req(('/foo', 'GET'), {'headers': {'X-Auth-Token': 'totally_bogus'}}) cs.http_log_req( ('/foo', 'GET'), {'headers': {}, 'data': '{"auth": {"passwordCredentials": ' '{"password": "password"}}}'}) output = logger.output.split('\n') self.assertIn("REQ: curl -i /foo -X GET", output) self.assertIn( "REQ: curl -i /foo -X GET -H " '"X-Auth-Token: totally_bogus"', output) self.assertIn( "REQ: curl -i /foo -X GET -d " '\'{"auth": {"passwordCredentials": {"password":' ' "password"}}}\'', output) @mock.patch.object(service_catalog, 'ServiceCatalog') def test_client_auth_token(self, mock_service_catalog): auth_url = 'http://www.blah.com' proxy_token = 'foobar' proxy_tenant_id = 'user' mock_service_catalog.return_value.get_token = mock.Mock( return_value=proxy_token) instance = other_client.HTTPClient(proxy_token=proxy_token, proxy_tenant_id=proxy_tenant_id, user=None, password=None, tenant_id=proxy_tenant_id, projectid=None, timeout=2, auth_url=auth_url) instance.management_url = 'http://example.com' instance.get_service_url = mock.Mock(return_value='http://example.com') instance.version = 'v2.0' mock_request = mock.Mock() mock_request.return_value = requests.Response() mock_request.return_value.status_code = 200 mock_request.return_value.headers = { 'x-server-management-url': 'blah.com', 'x-auth-token': 'blah', } with mock.patch('requests.request', mock_request): instance.authenticate() mock_request.assert_called_with( 'GET', auth_url + '/tokens/foobar?belongsTo=user', headers={'User-Agent': 'python-troveclient', 'Accept': 'application/json', 'X-Auth-Token': proxy_token}, timeout=2, verify=True) @mock.patch.object(service_catalog, 'ServiceCatalog', side_effect=KeyError) def test_client_auth_token_authorization_failure(self, mock_service_catalog): auth_url = 'http://www.blah.com' proxy_token = 'foobar' proxy_tenant_id = 'user' mock_service_catalog.return_value.get_token = mock.Mock( return_value=proxy_token) instance = other_client.HTTPClient(proxy_token=proxy_token, proxy_tenant_id=proxy_tenant_id, user=None, password=None, tenant_id=proxy_tenant_id, projectid=None, timeout=2, auth_url=auth_url) instance.management_url = 'http://example.com' instance.get_service_url = mock.Mock(return_value='http://example.com') instance.version = 'v2.0' mock_request = mock.Mock() mock_request.return_value = requests.Response() mock_request.return_value.status_code = 200 mock_request.return_value.headers = { 'x-server-management-url': 'blah.com', 'x-auth-token': 'blah', } with mock.patch('requests.request', mock_request): self.assertRaises(exceptions.AuthorizationFailure, instance.authenticate) @mock.patch.object(service_catalog, 'ServiceCatalog', side_effect=other_client.exceptions.EndpointNotFound) def test_client_auth_token_endpoint_not_found(self, mock_service_catalog): auth_url = 
'http://www.blah.com' proxy_token = 'foobar' proxy_tenant_id = 'user' mock_service_catalog.return_value.get_token = mock.Mock( return_value=proxy_token) instance = other_client.HTTPClient(proxy_token=proxy_token, proxy_tenant_id=proxy_tenant_id, user=None, password=None, tenant_id=proxy_tenant_id, projectid=None, timeout=2, auth_url=auth_url) instance.management_url = 'http://example.com' instance.get_service_url = mock.Mock(return_value='http://example.com') instance.version = 'v2.0' mock_request = mock.Mock() mock_request.return_value = requests.Response() mock_request.return_value.status_code = 200 mock_request.return_value.headers = { 'x-server-management-url': 'blah.com', 'x-auth-token': 'blah', } with mock.patch('requests.request', mock_request): self.assertRaises(exceptions.EndpointNotFound, instance.authenticate) @mock.patch.object(service_catalog, 'ServiceCatalog') def test_client_auth_token_v1_auth_failure(self, mock_service_catalog): auth_url = 'http://www.blah.com' proxy_token = 'foobar' proxy_tenant_id = 'user' mock_service_catalog.return_value.get_token = mock.Mock( return_value=proxy_token) instance = other_client.HTTPClient(proxy_token=proxy_token, proxy_tenant_id=proxy_tenant_id, user=None, password=None, tenant_id=proxy_tenant_id, projectid=None, timeout=2, auth_url=auth_url) instance.management_url = 'http://example.com' instance.get_service_url = mock.Mock(return_value='http://example.com') instance.version = 'v1.0' mock_request = mock.Mock() mock_request.return_value = requests.Response() mock_request.return_value.status_code = 200 mock_request.return_value.headers = { 'x-server-management-url': 'blah.com', 'x-auth-token': 'blah', } with mock.patch('requests.request', mock_request): self.assertRaises(exceptions.NoTokenLookupException, instance.authenticate) @mock.patch.object(service_catalog, 'ServiceCatalog') def test_client_auth_token_v1_auth(self, mock_service_catalog): auth_url = 'http://www.blah.com' proxy_token = 'foobar' mock_service_catalog.return_value.get_token = mock.Mock( return_value=proxy_token) instance = other_client.HTTPClient(user='user', password='password', projectid='projectid', timeout=2, auth_url=auth_url) instance.management_url = 'http://example.com' instance.get_service_url = mock.Mock(return_value='http://example.com') instance.version = 'v1.0' mock_request = mock.Mock() mock_request.return_value = requests.Response() mock_request.return_value.status_code = 200 mock_request.return_value.headers = { 'x-server-management-url': 'blah.com', } headers = {'Content-Type': 'application/json', 'Accept': 'application/json', 'User-Agent': 'python-troveclient'} with mock.patch('requests.request', mock_request): instance.authenticate() called_args, called_kwargs = mock_request.call_args self.assertEqual(('POST', 'http://www.blah.com/v2.0/tokens'), called_args) self.assertEqual(headers, called_kwargs['headers']) def test_client_get(self): auth_url = 'http://www.blah.com' instance = other_client.HTTPClient(user='user', password='password', projectid='project_id', timeout=2, auth_url=auth_url) instance._cs_request = mock.Mock() instance.get('clusters') instance._cs_request.assert_called_with('clusters', 'GET') def test_client_patch(self): auth_url = 'http://www.blah.com' body = mock.Mock() instance = other_client.HTTPClient(user='user', password='password', projectid='project_id', timeout=2, auth_url=auth_url) instance._cs_request = mock.Mock() instance.patch('instances/dummy-instance-id', body=body) instance._cs_request.assert_called_with( 
'instances/dummy-instance-id', 'PATCH', body=body) def test_client_post(self): auth_url = 'http://www.blah.com' body = {"add_shard": {}} instance = other_client.HTTPClient(user='user', password='password', projectid='project_id', timeout=2, auth_url=auth_url) instance._cs_request = mock.Mock() instance.post('clusters/dummy-cluster-id', body=body) instance._cs_request.assert_called_with( 'clusters/dummy-cluster-id', 'POST', body=body) def test_client_put(self): auth_url = 'http://www.blah.com' body = {"user": {"password": "new_password"}} instance = other_client.HTTPClient(user='user', password='password', projectid='project_id', timeout=2, auth_url=auth_url) instance._cs_request = mock.Mock() instance.put('instances/dummy-instance-id/user/dummy-user', body=body) instance._cs_request.assert_called_with( 'instances/dummy-instance-id/user/dummy-user', 'PUT', body=body) def test_client_delete(self): auth_url = 'http://www.blah.com' instance = other_client.HTTPClient(user='user', password='password', projectid='project_id', timeout=2, auth_url=auth_url) instance._cs_request = mock.Mock() instance.delete('/backups/dummy-backup-id') instance._cs_request.assert_called_with('/backups/dummy-backup-id', 'DELETE') @mock.patch.object(adapter.LegacyJsonAdapter, 'request') def test_database_service_name(self, m_request): m_request.return_value = (mock.MagicMock(status_code=200), None) client = other_client.SessionClient(session=mock.MagicMock(), auth=mock.MagicMock()) client.request("http://no.where", 'GET') self.assertIsNone(client.database_service_name) client = other_client.SessionClient(session=mock.MagicMock(), auth=mock.MagicMock(), database_service_name='myservice') client.request("http://no.where", 'GET') self.assertEqual('myservice', client.database_service_name) @mock.patch.object(adapter.LegacyJsonAdapter, 'request') @mock.patch.object(adapter.LegacyJsonAdapter, 'get_endpoint', return_value=None) def test_error_sessionclient(self, m_end_point, m_request): m_request.return_value = (mock.MagicMock(status_code=200), None) self.assertRaises(exceptions.EndpointNotFound, other_client.SessionClient, session=mock.MagicMock(), auth=mock.MagicMock()) def test_construct_http_client(self): mock_request = mock.Mock() mock_request.return_value = requests.Response() mock_request.return_value.status_code = 200 mock_request.return_value.headers = { 'x-server-management-url': 'blah.com', 'x-auth-token': 'blah', } with mock.patch('requests.request', mock_request): self.assertIsInstance(other_client._construct_http_client(), other_client.HTTPClient) self.assertIsInstance( other_client._construct_http_client(session=mock.Mock(), auth=mock.Mock()), other_client.SessionClient)
python
# Imagekit options
from imagekit import processors
from imagekit.specs import ImageSpec


class Options(object):
    """ Class handling per-model imagekit options """
    image_field = 'image'
    crop_horz_field = 'crop_horz'
    crop_vert_field = 'crop_vert'
    preprocessor_spec = None
    cache_dir = 'cache'
    save_count_as = None
    cache_filename_format = "%(filename)s_%(specname)s.%(extension)s"
    admin_thumbnail_spec = 'admin_thumbnail'
    spec_module = 'imagekit.defaults'
    #storage = defaults to image_field.storage

    def __init__(self, opts):
        # dict.iteritems() is Python 2 only; items() works on both 2 and 3.
        for key, value in opts.__dict__.items():
            setattr(self, key, value)
        self.specs = []
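# Minimal sketch of how Options copies attributes from whatever object is passed
# in as `opts`; SimpleNamespace stands in for a real per-model options holder and
# the field values are illustrative.
from types import SimpleNamespace

photo_options = Options(SimpleNamespace(image_field='photo', cache_dir='photo_cache'))
# photo_options.image_field == 'photo'; attributes not supplied keep the class defaults.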
python
import numpy as np import pandas as pd from typing import Any, Union def get_timestamp(value: Union[int, str]) -> Union[pd.Timestamp, None]: if value is None or isinstance(value, pd.Timestamp): return value if isinstance(value, (int, np.integer)): return pd.Timestamp(value, unit='s') return pd.Timestamp(value) def is_iterable(something: Any) -> bool: """ check if something is a list, tuple or set :param something: any object :return: bool. true if something is a list, tuple or set """ return isinstance(something, (list, tuple, set)) def timeseries_data_to_write_request(data: Union[pd.DataFrame, pd.Series, np.ndarray, np.recarray], tbk: str, ) -> dict: if isinstance(data, (np.ndarray, np.recarray)): return _np_array_to_dataset_params(data) elif isinstance(data, pd.Series): return _pd_series_to_dataset_params(data, tbk) elif isinstance(data, pd.DataFrame): return _pd_dataframe_to_dataset_params(data) raise TypeError('data must be pd.DataFrame, pd.Series, np.ndarray, or np.recarray') def _np_array_to_dataset_params(data: Union[np.ndarray, np.recarray]) -> dict: if not data.dtype.names: raise TypeError('numpy arrays must declare named column dtypes') return dict(column_types=[data.dtype[name].str.replace('<', '') for name in data.dtype.names], column_names=list(data.dtype.names), column_data=[bytes(memoryview(data[name])) for name in data.dtype.names], length=len(data)) def _pd_series_to_dataset_params(data: pd.Series, tbk: str) -> dict: # single column of data (indexed by timestamp, eg from ohlcv_df['ColName']) if data.index.name == 'Epoch': epoch = bytes(memoryview(data.index.to_numpy(dtype='i8') // 10**9)) return dict(column_types=['i8', data.dtype.str.replace('<', '')], column_names=['Epoch', data.name or tbk.split('/')[-1]], column_data=[epoch, bytes(memoryview(data.to_numpy()))], length=len(data)) # single row of data (named indexes for one timestamp, eg from ohlcv_df.iloc[N]) epoch = bytes(memoryview(data.name.to_numpy().astype(dtype='i8') // 10**9)) return dict(column_types=['i8'] + [data.dtype.str.replace('<', '') for _ in range(0, len(data))], column_names=['Epoch'] + data.index.to_list(), column_data=[epoch] + [bytes(memoryview(val)) for val in data.array], length=1) def _pd_dataframe_to_dataset_params(data: pd.DataFrame) -> dict: epoch = bytes(memoryview(data.index.to_numpy(dtype='i8') // 10**9)) return dict(column_types=['i8'] + [dtype.str.replace('<', '') for dtype in data.dtypes], column_names=['Epoch'] + data.columns.to_list(), column_data=[epoch] + [bytes(memoryview(data[col].to_numpy())) for col in data.columns], length=len(data))
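# Hedged usage sketch: a tiny frame indexed by 'Epoch' converted into write-request
# parameters; the time-bucket key 'AAPL/1Min/OHLC' is illustrative.
# df = pd.DataFrame({'Open': [1.0, 2.0], 'Close': [1.5, 2.5]},
#                   index=pd.to_datetime(['2021-01-04 09:30', '2021-01-04 09:31']))
# df.index.name = 'Epoch'
# params = timeseries_data_to_write_request(df, 'AAPL/1Min/OHLC')
# params['column_names']  # -> ['Epoch', 'Open', 'Close']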
python
''' # Devs: Ali; Rakib; ''' from setuptools import setup, find_packages # Setup configuration for the tool setup( name='OEDA-Backend', version='1.0', long_description="", packages=find_packages(), include_package_data=False, zip_safe=False, install_requires=[ # Tempita is a small templating language for text substitution. 'Tempita', # MIT license # coloring terminal text 'colorama', # BSD license (BSD) # python server related 'tornado', # Apache license 'flask_restful', # BSD License (BSD) 'flask_cors', # MIT License (MIT) 'requests', # http integreation, Apache Software License (Apache 2.0) 'pyjwt', # JSON Web Token implementation in Python, MIT License (MIT) 'backports.ssl_match_hostname', # The ssl.match_hostname() function from Python 3.5, Python Software Foundation License # database 'elasticsearch', # Apache Software License (Apache License, Version 2.0) 'numpy>=1.14.2', # scientific computing, OSI Approved (BSD) 'statsmodels', # statistics and statistical testing, BSD License (BSD License) 'scikit-optimize>=0.5.2', # gauss optimizer, BSD 'pandas', # Powerful data structures for data analysis, time series, and statistics, BSD 'scipy', # Scientific Library for Python, BSD License (BSD) # Font 'freetype-py', # bindings for the FreeType library, GNU General Public License (GPL) # visualization 'pypng', # PNG image files to be read and written using pure Python, MIT License 'matplotlib', # Python Software Foundation License (BSD) 'seaborn', # statistical data visualization, BSD License (BSD (3-clause)) # data streaming 'kafka', # Pure Python client for Apache Kafka, Apache Software License (Apache License 2.0) 'paho-mqtt', # MQTT version 3.1.1 client class, OSI Approved (Eclipse Public License v1.0 / Eclipse Distribution License v1.0) ] )
python
import logging from pint import UnitRegistry, DimensionalityError, DefinitionSyntaxError, \ UndefinedUnitError from discord import Embed from discord.ext import commands log = logging.getLogger(f'charfred.{__name__}') class UnitConverter(commands.Cog): def __init__(self, bot): self.bot = bot self.session = bot.session self.ur = UnitRegistry() self.ur.autoconvert_offset_to_baseunit = True @commands.group() async def convert(self, ctx): """Converts stuff. Just measurements and temperatures for now. """ pass @convert.command() async def units(self, ctx, measurement: str, targetunit: str): """Converts a measurement to given target units. If you wanna convert temperatures, please use: 'deg' in front of the usual letter for your units, such as 'degC' for Celsius or 'degF' for Fahrenheit. """ try: m = self.ur(measurement) out = m.to(targetunit) except DimensionalityError as e: log.warning(e) await ctx.sendmarkdown(f'< Error! >' f'< {e} >') except DefinitionSyntaxError as e: log.warning(e) await ctx.sendmarkdown(f'< Unable to parse {measurement}! >\n' f'< {e} >') except UndefinedUnitError as e: log.warning(e) await ctx.sendmarkdown('< Sorry, I can only do basic units >\n' '< and temperatures. >') else: await ctx.sendmarkdown(f'# {measurement} is (roughly) {out}') @convert.command() async def block(self, ctx, x, z): """Convert Minecraft x, z coordinates to chunk and region. """ chunk = f'{(int(x) >> 4)}, {(int(z) >> 4)}' regionfile = 'r.' + str((int(x) >> 4) // 32) + '.' + str((int(z) >> 4) // 32) + '.mca' await ctx.sendmarkdown(f'# Coordinates x:{x}, z:{z} correspond to:\n' f'Chunk coordinates: {chunk}\n' f'Region file: {regionfile}') @convert.command() async def uuid(self, ctx, uuid: str): """Convert Minecraft UUID to Userprofile Info. More of a 'fetch' than a 'convert', since the data isn't actually stored in the UUID, but what the hell... """ async with self.session.get('https://sessionserver.mojang.com/' f'session/minecraft/profile/{uuid}') as r: d = await r.json() if not d: await ctx.sendmarkdown('< Couldn\'t get anything, sorry! >') return card = Embed( title=f'__Subject: {d["name"]}__', type='rich', color=0xe77070 ) card.set_thumbnail( url=f'https://crafatar.com/renders/body/{uuid}?overlay' ) card.add_field( name="Current Name:", value="```\n" + d["name"] + "\n```" ) card.add_field( name="UUID: (hey, you already know this!)", value="```\n" + uuid + "\n```" ) card.set_footer(text="Look at that asshole... ಠ_ಠ") await ctx.send(embed=card) def setup(bot): bot.add_cog(UnitConverter(bot))
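# Standalone check of the coordinate arithmetic used in the `block` command above:
# for x, z = 100, -250: 100 >> 4 == 6 and -250 >> 4 == -16, so the chunk is (6, -16);
# 6 // 32 == 0 and -16 // 32 == -1, so the region file is r.0.-1.mca.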
python
import pytest EXAMPLE = """\ { "version": "2020-11-30", "data": [ { "jisx0402": "13101", "old_code": "100", "postal_code": "1008105", "prefecture_kana": "", "city_kana": "", "town_kana": "", "town_kana_raw": "", "prefecture": "東京都", "city": "千代田区", "town": "大手町", "koaza": "", "kyoto_street": "", "building": "", "floor": "", "town_partial": false, "town_addressed_koaza": false, "town_chome": false, "town_multi": false, "town_raw": "大手町", "corporation": { "name": "チッソ 株式会社", "name_kana": "チツソ カブシキガイシヤ", "block_lot": "2丁目2-1(新大手町ビル)", "post_office": "銀座", "code_type": 0 } } ] } """ @pytest.fixture def dummy_json(): import json return json.loads(EXAMPLE)
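# Hedged sketch of a test consuming the fixture; the asserted values are taken
# from the EXAMPLE payload above:
def test_dummy_json_payload(dummy_json):
    assert dummy_json["version"] == "2020-11-30"
    assert dummy_json["data"][0]["postal_code"] == "1008105"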
python
# Comment section
# spoil written by Korbelz
# current scope: spoil Calc

print('*** This app is a fuel/supply spoilage calc ***')
print('*** Written by Korbelz ***')
print('*** Feedback/Bugs: Discord: Korbelz#3504 ***')
input('Press ENTER to continue')

port = input("what size is the port? ")
port = int(port)
airfield = input("what size is the airfield? ")
airfield = int(airfield)

fuel_waste = 1000 + ((port + airfield) * (port + airfield) * 2000)
supply_waste = 5000 + ((port + airfield) * (port + airfield) * 3000)

print(f'fuel over {fuel_waste} will spoil at this base')
print(f'supply over {supply_waste} will spoil at this base')
input('Press ENTER to exit')
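# Worked example of the spoilage formula above (not part of the interactive flow):
# a size-3 port plus a size-4 airfield gives (3 + 4) * (3 + 4) == 49, so fuel over
# 1000 + 49 * 2000 = 99000 and supply over 5000 + 49 * 3000 = 152000 will spoil.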
python
#!/usr/bin/env python3

import pytest
import glooey
from vecrec import Rect


def test_misspelled_alignment():
    with pytest.raises(glooey.UsageError) as err:
        glooey.drawing.align('not an alignment', None, None)


def test_parent_changed():
    child, parent = Rect.null(), Rect.null()

    def change_parent(child_rect, parent_rect):
        parent_rect.left += 1

    with pytest.raises(RuntimeError, match='change_parent'):
        glooey.drawing.align(change_parent, child, parent)


def test_child_outside_parent():
    child = Rect.from_square(4.5)
    parent = Rect.from_square(6)

    def move_1px_right(child_rect, parent_rect):
        child_rect.left += 1

    # This should be fine the first time...
    glooey.drawing.align(move_1px_right, child, parent)

    # ...and also fine the second time, because the child is allowed to exceed
    # its parent by 1 px to account for rounding errors...
    glooey.drawing.align(move_1px_right, child, parent)

    # ...but out-of-bounds the third time.
    with pytest.raises(RuntimeError, match='move_1px_right'):
        glooey.drawing.align(move_1px_right, child, parent)
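# Hedged sketch of a well-behaved custom alignment callable, matching the contract
# the tests above exercise: it may move the child rect, must leave the parent rect
# untouched, and must keep the child inside the parent.
def snap_left(child_rect, parent_rect):
    child_rect.left = parent_rect.left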
python
# from typing import NamedTuple
from monkey.tokens import token
from collections import OrderedDict


class Node:
    # this method is used only for debugging
    def token_literal(self):
        pass

    def string(self):
        pass


class Statement(Node):
    node = None

    # dummy method
    def statement_node(self):
        pass


class Expression(Node):
    node = None

    # dummy method
    def expression_node(self):
        pass


class Program(Node):
    statements = []

    def __init__(self, statements=None):
        if statements is None:
            statements = []
        self.statements = statements

    def token_literal(self):
        if len(self.statements) > 0:
            return self.statements[0].token_literal()
        else:
            return ""

    def string(self):
        # for now just return the concatenated statement strings
        out = ""
        for s in self.statements:
            out = out + s.string()
        return out


class Identifier(Expression):
    token = None  # Token
    value = ""

    def __init__(self, token, value):
        self.token = token
        self.value = value

    def token_literal(self):
        return self.token.Literal

    def string(self):
        return self.value


class LetStatement(Statement):
    token = None  # Token
    name = None   # Identifier
    value = None  # Expression

    def __init__(self, token=None, name=None, value=None):
        self.token = token
        self.name = name
        self.value = value

    def token_literal(self):
        return self.token.Literal

    def string(self):
        out = self.token_literal() + " "
        out = out + self.name.string()
        out = out + " = "
        if self.value is not None:
            out = out + self.value.string()
        out = out + ";"
        return out

    def __eq__(self, other):
        return isinstance(other, LetStatement) and self.__dict__ == other.__dict__


class ReturnStatement(Statement):
    token = None         # Token
    return_value = None  # Expression

    def __init__(self, token=None, return_value=None):
        self.token = token
        self.return_value = return_value

    def token_literal(self):
        return self.token.Literal

    def string(self):
        out = self.token_literal() + " "
        if self.return_value is not None:
            out = out + self.return_value.string()
        out = out + ";"
        return out

    def __eq__(self, other):
        return isinstance(other, ReturnStatement) and self.__dict__ == other.__dict__


class ExpressionStatement(Statement):
    token = None
    expression = None  # Expression

    def __init__(self, token=None, expression=None):
        self.token = token
        self.expression = expression

    def token_literal(self):
        return self.token.Literal

    def string(self):
        if self.expression is not None:
            return self.expression.string()
        return ""

    def __hash__(self):
        return hash(str(self.expression))

    def __eq__(self, other):
        return isinstance(other, ExpressionStatement) and self.__dict__ == other.__dict__


class IntegerLiteral(Expression):
    token = None  # Token
    value = 0     # integer

    def __init__(self, token=None, value=0):
        self.token = token
        self.value = value

    def token_literal(self):
        return self.token.Literal

    def string(self):
        return str(self.value)

    def __hash__(self):
        return hash(self.value)

    def __eq__(self, other):
        return isinstance(other, IntegerLiteral) and self.__dict__ == other.__dict__


class StringLiteral(Expression):
    token = None  # Token
    value = ""    # str

    def __init__(self, token, value=""):
        self.token = token
        self.value = value

    def token_literal(self):
        return self.token.Literal

    def string(self):
        return self.token.Literal


class PrefixExpression(Expression):
    token = None  # Token
    operator = ""
    right = None  # Expression

    def __init__(self, token=None, operator="", right=None):
        self.token = token
        self.operator = operator
        self.right = right

    def token_literal(self):
        return self.token.Literal

    def string(self):
        out = "(" + self.operator + self.right.string() + ")"
        return out

    def __eq__(self, other):
        return isinstance(other, PrefixExpression) and self.__dict__ == other.__dict__


class InfixExpression(Expression):
    token = None  # Token
    left = None   # Expression
    operator = ""
    right = None  # Expression

    def __init__(self, token=None, operator="", left=None, right=None):
        self.token = token
        self.left = left
        self.operator = operator
        self.right = right

    def token_literal(self):
        return self.token.Literal

    def string(self):
        out = "(" + self.left.string() + " " + self.operator + " " + self.right.string() + ")"
        return out

    def __hash__(self):
        return hash(self.left)

    def __eq__(self, other):
        return isinstance(other, InfixExpression) and self.__dict__ == other.__dict__


class Boolean(Expression):
    token = None
    value = False

    def __init__(self, token, value):
        self.token = token
        self.value = value

    def token_literal(self):
        return self.token.Literal

    def string(self):
        return self.token.Literal


class IfExpression(Expression):
    token = None        # 'if' token
    condition = None    # Expression
    consequence = None  # BlockStatement
    alternative = None  # BlockStatement

    def __init__(self, token=None, condition=None, consequence=None, alternative=None):
        self.token = token
        self.condition = condition
        self.consequence = consequence
        self.alternative = alternative

    def token_literal(self):
        return self.token.Literal

    def string(self):
        out = "if" + self.condition.string() + " " + self.consequence.string()
        if self.alternative is not None:
            out = out + "else " + self.alternative.string()
        return out

    def __eq__(self, other):
        return isinstance(other, IfExpression) and self.__dict__ == other.__dict__


class BlockStatement(Statement):
    token = None
    statements = []  # Statement(s)

    def __init__(self, token=None, statements=None):
        self.token = token
        if statements is None:
            statements = []
        self.statements = statements

    def token_literal(self):
        return self.token.Literal

    def string(self):
        out = ""
        for s in self.statements:
            out = out + s.string()
        return out

    def __eq__(self, other):
        return isinstance(other, BlockStatement) and self.__dict__ == other.__dict__


class CallExpression(Expression):
    token = None
    function = None  # Identifier or FunctionLiteral
    arguments = []   # Expression

    def __init__(self, token, function=None, arguments=None):
        self.token = token
        self.function = function
        if arguments is None:
            arguments = []
        self.arguments = arguments

    def token_literal(self):
        return self.token.Literal

    def string(self):
        args = []
        for a in self.arguments:
            args.append(a.string())
        out = "" + self.function.string()
        out = out + "(" + ", ".join(args) + ")"
        return out


class FunctionLiteral(Expression):
    token = None     # fn
    parameters = []  # Identifier
    body = None      # BlockStatement

    def __init__(self, token=None, parameters=None, body=None):
        self.token = token
        if parameters is None:
            parameters = []
        self.parameters = parameters
        self.body = body

    def token_literal(self):
        return self.token.Literal

    def string(self):
        # render as fn(<params>) <body>
        params = []
        for p in self.parameters:
            params.append(p.string())
        out = "" + self.token_literal()
        out = out + "(" + ", ".join(params) + ") "
        if self.body is not None:
            out = out + self.body.string()
        return out

    def __eq__(self, other):
        return isinstance(other, FunctionLiteral) and self.__dict__ == other.__dict__


class ArrayLiteral(Expression):
    token = None
    elements = []  # Expression

    def __init__(self, token=None, elements=None):
        self.token = token
        if elements is None:
            elements = []
        self.elements = elements

    def token_literal(self):
        return self.token.Literal

    def string(self):
        elements = []
        for e in self.elements:
            elements.append(e.string())
        out = "[" + ", ".join(elements) + "]"
        return out

    def __eq__(self, other):
        return isinstance(other, ArrayLiteral) and self.__dict__ == other.__dict__


class IndexExpression(Expression):
    token = None
    left = None   # Expression
    index = None  # Expression

    def __init__(self, token=None, left=None, index=None):
        self.token = token
        self.left = left
        self.index = index

    def token_literal(self):
        return self.token.Literal

    def string(self):
        out = "(" + self.left.string() + "[" + self.index.string() + "])"
        return out

    def __eq__(self, other):
        return isinstance(other, IndexExpression) and self.__dict__ == other.__dict__


class HashLiteral(Expression):
    token = None           # { token
    pairs = OrderedDict()  # OrderedDict[Expression]

    def __init__(self, token=None, pairs=None):
        self.token = token
        self.pairs = pairs

    def token_literal(self):
        return self.token.Literal

    def string(self):
        pairs = []
        for key, value in self.pairs.items():
            pairs.append(key.string() + ":" + value.string())
        out = "{" + ", ".join(pairs) + "}"
        return out

    def __eq__(self, other):
        return isinstance(other, HashLiteral) and self.__dict__ == other.__dict__


class MacroLiteral(Expression):
    token = None     # macro literal
    parameters = []  # Identifier
    body = None      # BlockStatement

    def __init__(self, token=None, parameters=None, body=None):
        self.token = token
        if parameters is None:
            parameters = []
        self.parameters = parameters
        self.body = body

    def token_literal(self):
        return self.token.Literal

    def string(self):
        # render as macro(<params>) <body>
        params = []
        for p in self.parameters:
            params.append(p.string())
        out = "" + self.token_literal()
        out = out + "(" + ", ".join(params) + ") "
        if self.body is not None:
            out = out + self.body.string()
        return out

    def __eq__(self, other):
        return isinstance(other, MacroLiteral) and self.__dict__ == other.__dict__
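# A minimal usage sketch for the AST above: build `let x = 5;` by hand and
# render it back with string(). SimpleToken is a stand-in assumption for
# monkey.tokens.token.Token (only the .Literal attribute is exercised here).
if __name__ == "__main__":
    from collections import namedtuple

    SimpleToken = namedtuple("SimpleToken", ["Type", "Literal"])

    program = Program(statements=[
        LetStatement(
            token=SimpleToken("LET", "let"),
            name=Identifier(SimpleToken("IDENT", "x"), "x"),
            value=IntegerLiteral(SimpleToken("INT", "5"), 5),
        )
    ])
    print(program.string())  # -> let x = 5;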
python
################################################################################
# Copyright (c) 2009-2020, National Research Foundation (SARAO)
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy
# of the License at
#
#   https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

import numpy as np
import astropy.units as u
import matplotlib.pyplot as plt

import katpoint

ant = katpoint.Antenna('KAT7, -30:43:17.34, 21:24:38.46, 1038, 12.0')
freq = 1800.0 * u.MHz
freq_range = np.arange(900.0, 2100.0, 10.0) * u.MHz

old_all = katpoint.Catalogue(open('source_list.csv'), antenna=ant, flux_frequency=freq)
old = old_all.filter(flux_limit=10 * u.Jy)
pks10 = katpoint.Catalogue(open('pkscat90_source_list.csv'), antenna=ant, flux_frequency=freq)
pks = pks10.filter(flux_limit=10 * u.Jy)
jy1_all = katpoint.Catalogue(open('kuehr1Jy_source_list.csv'), antenna=ant, flux_frequency=freq)
jy1 = jy1_all.filter(flux_limit=10 * u.Jy)

plot_rows = int(np.ceil(np.sqrt(len(old))))
plt.figure(1)
plt.clf()
for n, src in enumerate(old):
    flux = src.flux_density(freq)
    flux_str = f' {flux:.1f}' if not np.isnan(flux) else ''
    print(f'OLD: {src.names}{flux_str}')
    print(src.description)
    plt.subplot(plot_rows, plot_rows, n + 1)
    plt.plot(np.log10(freq_range.to_value(u.MHz)),
             np.log10(src.flux_density(freq_range).to_value(u.Jy)), 'b')
    jy1_src, min_dist = jy1.closest_to(src)
    if min_dist < 3 * u.arcmin:
        jy1_flux = jy1_src.flux_density(freq)
        jy1_flux_str = f' {jy1_flux:.1f}' if not np.isnan(jy1_flux) else ''
        print(f' --> 1JY: {jy1_src.names}{jy1_flux_str}')
        print(f' {jy1_src.description}')
        plt.plot(np.log10(freq_range.to_value(u.MHz)),
                 np.log10(jy1_src.flux_density(freq_range).to_value(u.Jy)), 'r')
        jy1.remove(jy1_src.name)
    pks_src, min_dist = pks.closest_to(src)
    if min_dist < 3 * u.arcmin:
        pks_flux = pks_src.flux_density(freq)
        pks_flux_str = f' {pks_flux:.1f}' if not np.isnan(pks_flux) else ''
        print(f' --> PKS: {pks_src.names}{pks_flux_str}')
        print(f' {pks_src.description}')
        plt.plot(np.log10(freq_range.to_value(u.MHz)),
                 np.log10(pks_src.flux_density(freq_range).to_value(u.Jy)), 'g')
        pks.remove(pks_src.name)
    plt.axis((np.log10(freq_range[0].to_value(u.MHz)),
              np.log10(freq_range[-1].to_value(u.MHz)), 0, 4))
    plt.xticks([])
    plt.yticks([])
    print()
plt.figtext(0.5, 0.93, 'Spectra (log S vs. log v) old=b, 1Jy=r, pks=g', ha='center', va='center')
plt.show()
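# Why the plots above use log S against log v: katpoint flux models are
# polynomials in log10(frequency), so a plain power-law source appears as a
# straight line on these axes. A tiny self-contained sketch of that relation
# (the coefficients a and b below are made up for illustration, not taken
# from any of the catalogues loaded above):
def toy_log_flux(freq_mhz, a=2.0, b=-0.7):
    """log10(S / Jy) for a toy power-law source, S proportional to freq**b."""
    return a + b * np.log10(freq_mhz)


print(toy_log_flux(np.array([900.0, 1800.0, 2100.0])))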
python
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import os

import cw

sys.setrecursionlimit(1073741824)


def main():
    if len(cw.SKIN_CONV_ARGS) > 0:
        os.chdir(os.path.dirname(sys.argv[0]) or '.')
    try:
        app = cw.frame.MyApp(0)
        app.MainLoop()
    finally:
        cw.util.clear_mutex()


if __name__ == "__main__":
    main()
python
from flask_appbuilder.security.manager import AUTH_OAUTH

from airflow.www.security import AirflowSecurityManager

from auth import config

WTF_CSRF_ENABLED = True

AUTH_TYPE = AUTH_OAUTH
AUTH_USER_REGISTRATION_ROLE = 'Admin'
AUTH_USER_REGISTRATION = True
AUTH_ROLES_SYNC_AT_LOGIN = True

OAUTH_PROVIDERS = [
    {
        'name': 'authbroker',
        'token_key': 'access_token',
        'icon': 'fa-lock',
        'remote_app': {
            'api_base_url': config.AUTHBROKER_URL + 'api/v1/user/',  # type: ignore
            'access_token_url': config.AUTHBROKER_URL + 'o/token/',  # type: ignore
            'authorize_url': config.AUTHBROKER_URL + 'o/authorize/',  # type: ignore
            'request_token_url': None,
            'client_id': config.AUTHBROKER_CLIENT_ID,
            'client_secret': config.AUTHBROKER_CLIENT_SECRET,
            'access_token_method': 'POST',
            'client_kwargs': {'scope': 'read write'},
        },
    }
]


class CustomSecurityManager(AirflowSecurityManager):
    def oauth_user_info(self, provider, response=None):  # pylint: disable=method-hidden
        user_json = self.appbuilder.sm.oauth_remotes[provider].get('me').json()

        return {
            'username': user_json['user_id'],
            'email': user_json['email'],
            'first_name': user_json['first_name'],
            'last_name': user_json['last_name'],
        }


SECURITY_MANAGER_CLASS = CustomSecurityManager
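# A quick, self-contained check of the oauth_user_info mapping above. The
# method only touches self.appbuilder.sm.oauth_remotes[provider], so a stub
# with the same shape is enough; the payload below is invented for the test
# and the whole block only runs when this module is executed directly.
if __name__ == '__main__':
    from types import SimpleNamespace
    from unittest.mock import Mock

    fake_remote = Mock()
    fake_remote.get.return_value.json.return_value = {
        'user_id': 'jane.doe',
        'email': 'jane.doe@example.com',
        'first_name': 'Jane',
        'last_name': 'Doe',
    }
    fake_self = SimpleNamespace(
        appbuilder=SimpleNamespace(sm=SimpleNamespace(oauth_remotes={'authbroker': fake_remote}))
    )
    assert CustomSecurityManager.oauth_user_info(fake_self, 'authbroker') == {
        'username': 'jane.doe',
        'email': 'jane.doe@example.com',
        'first_name': 'Jane',
        'last_name': 'Doe',
    }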
python
#!/usr/bin/python
# vim: set fileencoding=utf8 :
# __author__ = 'keping.chu'
import multiprocessing as mp
from threading import Thread

import aiohttp
import easyquotation
import time

from easyquant import PushBaseEngine
from easyquant.event_engine import Event


class FixedDataEngine(PushBaseEngine):
    EventType = 'custom'
    PushInterval = 15

    def __init__(self, event_engine, clock_engine, watch_stocks=None, s='sina'):
        self.watch_stocks = watch_stocks
        self.s = s
        self.source = None
        self.__queue = mp.Queue(1000)
        self.is_pause = not clock_engine.is_tradetime_now()
        self._control_thread = Thread(target=self._process_control,
                                      name="FixedDataEngine._control_thread")
        self._control_thread.start()
        super(FixedDataEngine, self).__init__(event_engine, clock_engine)

    def _process_control(self):
        while True:
            try:
                msg = self.__queue.get(block=True)
                if msg == "pause":
                    self.is_pause = True
                else:
                    self.is_pause = False
            except:
                pass

    def pause(self):
        self.__queue.put("pause")

    def work(self):
        self.__queue.put("work")

    def init(self):
        # perform the relevant initialisation
        self.source = easyquotation.use(self.s)

    def fetch_quotation(self):
        # return the latest quotations
        return self.source.stocks(self.watch_stocks)

    def push_quotation(self):
        while self.is_active:
            if self.is_pause:
                time.sleep(1)
                continue
            try:
                response_data = self.fetch_quotation()
            except aiohttp.errors.ServerDisconnectedError:
                time.sleep(self.PushInterval)
                continue
            event = Event(event_type=self.EventType, data=response_data)
            self.event_engine.put(event)
            time.sleep(self.PushInterval)
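# The pause()/work() methods above just post messages onto a queue that a
# dedicated control thread drains, so callers never block on the poller loop.
# A minimal stand-alone sketch of that control-queue pattern (plain
# threading/queue, no easyquant dependency; the names are illustrative only):
import queue
import threading


class PausableWorker:
    def __init__(self):
        self._control_queue = queue.Queue()
        self.is_pause = False
        threading.Thread(target=self._control, daemon=True).start()

    def _control(self):
        # the control thread owns the is_pause flag
        while True:
            self.is_pause = (self._control_queue.get() == "pause")

    def pause(self):
        self._control_queue.put("pause")

    def work(self):
        self._control_queue.put("work")


if __name__ == '__main__':
    worker = PausableWorker()
    worker.pause()
    time.sleep(0.1)          # give the control thread a moment to react
    print(worker.is_pause)   # -> True
    worker.work()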
python
from ted_sws.core.model.notice import Notice from ted_sws.core.model.manifestation import XMLManifestation class FakeNotice(Notice): ted_id: str = 'fake-notice-id' xml_manifestation: XMLManifestation = XMLManifestation( object_data='<?xml version="1.0" encoding="UTF-8"?><TED_EXPORT xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://publications.europa.eu/resource/schema/ted/R2.0.8/publication" xmlns:n2016="http://publications.europa.eu/resource/schema/ted/2016/nuts" xsi:schemaLocation="http://publications.europa.eu/resource/schema/ted/R2.0.8/publication TED_EXPORT.xsd" DOC_ID="426046-2018" EDITION="2018189"><TECHNICAL_SECTION><RECEPTION_ID>18-432813-001</RECEPTION_ID><DELETION_DATE>20190104</DELETION_DATE><FORM_LG_LIST>EN CS DA DE ET EL ES FR IT LV LT HR HU MT NL PL PT SK SL FI SV RO GA BG </FORM_LG_LIST><COMMENTS>From Convertor</COMMENTS></TECHNICAL_SECTION><LINKS_SECTION><XML_SCHEMA_DEFINITION_LINK xlink:type="simple" xlink:href="http://ted.europa.eu" xlink:title="TED WEBSITE"/><OFFICIAL_FORMS_LINK xlink:type="simple" xlink:href="http://ted.europa.eu"/><FORMS_LABELS_LINK xlink:type="simple" xlink:href="http://ted.europa.eu"/><ORIGINAL_CPV_LINK xlink:type="simple " xlink:href="http://ted.europa.eu"/><ORIGINAL_NUTS_LINK xlink:type="simple" xlink:href="http://ted.europa.eu"/></LINKS_SECTION><CODED_DATA_SECTION><REF_OJS><COLL_OJ>S</COLL_OJ><NO_OJ>189</NO_OJ><DATE_PUB>20181002</DATE_PUB></REF_OJS><NOTICE_DATA><NO_DOC_OJS>2018/S 189-426046</NO_DOC_OJS><URI_LIST><URI_DOC LG="EN">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:EN:HTML</URI_DOC><URI_DOC LG="CS">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:CS:HTML</URI_DOC><URI_DOC LG="DA">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:DA:HTML</URI_DOC><URI_DOC LG="DE">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:DE:HTML</URI_DOC><URI_DOC LG="ET">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:ET:HTML</URI_DOC><URI_DOC LG="EL">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:EL:HTML</URI_DOC><URI_DOC LG="ES">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:ES:HTML</URI_DOC><URI_DOC LG="FR">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:FR:HTML</URI_DOC><URI_DOC LG="IT">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:IT:HTML</URI_DOC><URI_DOC LG="LV">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:LV:HTML</URI_DOC><URI_DOC LG="LT">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:LT:HTML</URI_DOC><URI_DOC LG="HR">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:HR:HTML</URI_DOC><URI_DOC LG="HU">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:HU:HTML</URI_DOC><URI_DOC LG="MT">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:MT:HTML</URI_DOC><URI_DOC LG="NL">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:NL:HTML</URI_DOC><URI_DOC LG="PL">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:PL:HTML</URI_DOC><URI_DOC LG="PT">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:PT:HTML</URI_DOC><URI_DOC LG="SK">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:SK:HTML</URI_DOC><URI_DOC LG="SL">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:SL:HTML</URI_DOC><URI_DOC LG="FI">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:FI:HTML</URI_DOC><URI_DOC LG="SV">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:SV:HTML</URI_DOC><URI_DOC LG="RO">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:RO:HTML</URI_DOC><URI_DOC 
LG="GA">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:GA:HTML</URI_DOC><URI_DOC LG="BG">http://ted.europa.eu/udl?uri=TED:NOTICE:426046-2018:TEXT:BG:HTML</URI_DOC></URI_LIST><LG_ORIG>EN</LG_ORIG><ISO_COUNTRY VALUE="ZM"/><IA_URL_GENERAL/><ORIGINAL_CPV CODE="31321300">High-voltage cable</ORIGINAL_CPV></NOTICE_DATA><CODIF_DATA><DS_DATE_DISPATCH>20180926</DS_DATE_DISPATCH><DT_DATE_FOR_SUBMISSION>20181226</DT_DATE_FOR_SUBMISSION><AA_AUTHORITY_TYPE CODE="4">Utilities entity</AA_AUTHORITY_TYPE><TD_DOCUMENT_TYPE CODE="7">Contract award notice</TD_DOCUMENT_TYPE><NC_CONTRACT_NATURE CODE="1">Works</NC_CONTRACT_NATURE><PR_PROC CODE="1">Open procedure</PR_PROC><RP_REGULATION CODE="2">European Investment Bank, European Investment Fund, European Bank for Reconstruction and Development</RP_REGULATION><TY_TYPE_BID CODE="9">Not applicable</TY_TYPE_BID><AC_AWARD_CRIT CODE="Z">Not specified</AC_AWARD_CRIT><MA_MAIN_ACTIVITIES CODE="Z">Not specified</MA_MAIN_ACTIVITIES><HEADING>BI406</HEADING></CODIF_DATA></CODED_DATA_SECTION><TRANSLATION_SECTION><ML_TITLES><ML_TI_DOC LG="BG"><TI_CY>Замбия</TI_CY><TI_TOWN>Лусака</TI_TOWN><TI_TEXT><P>ЕИБ - Подстанции за високо напрежение</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="CS"><TI_CY>Zambie</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIB - Rozvodny vysokého napětí</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="DA"><TI_CY>Zambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIB - Højspændingstransformerstationer</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="DE"><TI_CY>Sambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIB - Hochspannungs-Umspannstationen</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="EL"><TI_CY>Ζάμπια</TI_CY><TI_TOWN>Λουσάκα</TI_TOWN><TI_TEXT><P>ΕΤΕπ - Υποσταθμοί υψηλής τάσης</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="EN"><TI_CY>Zambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIB - High voltage substations</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="ES"><TI_CY>Zambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>BEI - Subestaciones de alto voltaje</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="ET"><TI_CY>Sambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIP - Kõrgepingealajaamad</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="FI"><TI_CY>Sambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIP - Suurjännitemuuntoasemat</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="FR"><TI_CY>Zambie</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>BEI - Sous-stations à haute tension</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="GA"><TI_CY>Saimbia, an t</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>BEI - High voltage substations</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="HR"><TI_CY>Zambija</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIB - Egipatski prijenos električne energije</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="HU"><TI_CY>Zambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EBB - Nagyfeszültségű alállomások</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="IT"><TI_CY>Zambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>BEI - Sottostazioni ad alto voltaggio</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="LT"><TI_CY>Zambija</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIB - Aukštos įtampos pastotės</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="LV"><TI_CY>Zambija</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIB - Augstsprieguma apakšstacijas</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="MT"><TI_CY>iż-Żambja</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>BEI - Stazzjonijiet sekondarji ta’ vultaġġ għoli</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="NL"><TI_CY>Zambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIB - Hoogspanningsonderstations</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC 
LG="PL"><TI_CY>Zambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EBI - Podstacje wysokiego napięcia</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="PT"><TI_CY>Zâmbia</TI_CY><TI_TOWN>Lusaca</TI_TOWN><TI_TEXT><P>BEI - Subestações de alta tensão</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="RO"><TI_CY>Zambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>BEI - Substaţii de înaltă tensiune</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="SK"><TI_CY>Zambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIB - Rozvodne vysokého napätia</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="SL"><TI_CY>Zambija</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIB - Visoko-napetostne razdelilne postaje</P></TI_TEXT></ML_TI_DOC><ML_TI_DOC LG="SV"><TI_CY>Zambia</TI_CY><TI_TOWN>Lusaka</TI_TOWN><TI_TEXT><P>EIB - Högspänningstransformatorstationer</P></TI_TEXT></ML_TI_DOC></ML_TITLES><ML_AA_NAMES><AA_NAME LG="EN">ZESCO Limited</AA_NAME><AA_NAME LG="CS">ZESCO Limited</AA_NAME><AA_NAME LG="DA">ZESCO Limited</AA_NAME><AA_NAME LG="DE">ZESCO Limited</AA_NAME><AA_NAME LG="ET">ZESCO Limited</AA_NAME><AA_NAME LG="EL">ZESCO Limited</AA_NAME><AA_NAME LG="ES">ZESCO Limited</AA_NAME><AA_NAME LG="FR">ZESCO Limited</AA_NAME><AA_NAME LG="IT">ZESCO Limited</AA_NAME><AA_NAME LG="LV">ZESCO Limited</AA_NAME><AA_NAME LG="LT">ZESCO Limited</AA_NAME><AA_NAME LG="HR">ZESCO Limited</AA_NAME><AA_NAME LG="HU">ZESCO Limited</AA_NAME><AA_NAME LG="MT">ZESCO Limited</AA_NAME><AA_NAME LG="NL">ZESCO Limited</AA_NAME><AA_NAME LG="PL">ZESCO Limited</AA_NAME><AA_NAME LG="PT">ZESCO Limited</AA_NAME><AA_NAME LG="SK">ZESCO Limited</AA_NAME><AA_NAME LG="SL">ZESCO Limited</AA_NAME><AA_NAME LG="FI">ZESCO Limited</AA_NAME><AA_NAME LG="SV">ZESCO Limited</AA_NAME><AA_NAME LG="RO">ZESCO Limited</AA_NAME><AA_NAME LG="GA">ZESCO Limited</AA_NAME><AA_NAME LG="BG">ZESCO Limited</AA_NAME></ML_AA_NAMES></TRANSLATION_SECTION><FORM_SECTION><OTH_NOT CATEGORY="ORIGINAL" LG="EN" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 
4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="CS" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="DA" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="DE" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 
4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="ET" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="EL" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="ES" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 
4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="FR" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="IT" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="LV" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 
4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="LT" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="HR" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="HU" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 
4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="MT" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="NL" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="PL" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 
4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="PT" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="SK" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="SL" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 
4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="FI" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="SV" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="RO" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 
4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="GA" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT><OTH_NOT CATEGORY="TRANSLATION" LG="BG" VERSION="R2.0.8.S04.E01"><FD_OTH_NOT><TI_DOC><P>EIB - High voltage substations (ZM-Lusaka)</P></TI_DOC><STI_DOC><P>Award notice</P></STI_DOC><CONTENTS><P>Project title: Lusaka Power Transmission and Distribution Network</P><P>Project number: 2012-0602</P><P>Lot title: Procurement of 2 Substations and Associated Switching Stations in 2 lots – Lot 2: Chawama 132/11 kV</P><P>Publication reference: OJ/S S101 – 200031-2017</P><P>Publication date of the procurement notice: 27.5.2017</P><P>Promoter’s name: <ADDRESS_NOT_STRUCT><ORGANISATION>ZESCO Limited</ORGANISATION><BLK_BTX>, </BLK_BTX><TOWN>Lusaka</TOWN><BLK_BTX>, ZAMBIA</BLK_BTX></ADDRESS_NOT_STRUCT></P><P>Contract value: 10 768 794,05 USD</P><P>Date of award of contract: 20 September 2018</P><P>Number of bids received: 21</P><P>Name of successful bidder: Sieyuan Electric Co. Ltd in Joint Venture with Techno electric Engineering Co., Limited, Sieyuan No. 4399 Jindu road, MinhangDist, Shangai — China.</P></CONTENTS></FD_OTH_NOT></OTH_NOT></FORM_SECTION></TED_EXPORT>' )
python
# -*- coding: utf-8 -*- """ Last modified June 2021 @author: pauliuk see: https://github.com/IndEcol/openLCA_ecoinvent_Material_Footprint_LCIA """ # Script ei_LCIA_MF_populate.py # Import required libraries: #%% import openpyxl import numpy as np import os import uuid import json import mf_Paths ############################# # Functions & Constants # ############################# def CF_generate(mli,Val,dnames,duuid,dunit,ei_version_string): # create dictionary with characterisation factor if ei_version_string == '_ei_3_7_1' or ei_version_string == '_ei_3_8': # unit defintions have not changed, are the same for both ei versions. U_Mass = { "@type": "Unit", "@id": "20aadc24-a391-41cf-b340-3e4529f44bde", "name": "kg"} U_Energy = { "@type": "Unit", "@id": "52765a6c-3896-43c2-b2f4-c679acf13efe", "name": "MJ"} U_Volume = { "@type": "Unit", "@id": "1c3a9695-398d-4b1f-b07e-a8715b610f70", "name": "m3"} FP_Mass = { "@type": "FlowProperty", "@id": "93a60a56-a3c8-11da-a746-0800200b9a66", "name": "Mass", "categoryPath": [ "Technical flow properties"]} FP_Energy = { "@type": "FlowProperty", "@id": "f6811440-ee37-11de-8a39-0800200c9a66", "name": "Energy", "categoryPath": [ "Technical flow properties"]} FP_Volumne = { "@type": "FlowProperty", "@id": "93a60a56-a3c8-22da-a746-0800200c9a66", "name": "Volume", "categoryPath": [ "Technical flow properties"]} CF = {} CF["@type"] = "ImpactFactor" CF["value"] = Val[mli] CF["flow"] = {"@type": "Flow", "@id": duuid[mli], "name": dnames[mli], "categoryPath": [ "Elementary flows", "Resource", "in ground"], "flowType": "ELEMENTARY_FLOW", "refUnit": dunit[mli]} if dunit[mli] == 'kg': CF["unit"] = U_Mass CF["flowProperty"] = FP_Mass elif dunit[mli] == 'MJ': CF["unit"] = U_Energy CF["flowProperty"] = FP_Energy elif dunit[mli] == 'm3': CF["unit"] = U_Volume CF["flowProperty"] = FP_Volumne else: None return CF ################# # MAIN # ################# # Set configuration data #ei_version_string = '_ei_3_7_1' ei_version_string = '_ei_3_8' #%% if ei_version_string == '_ei_3_7_1': tp = mf_Paths.data_path_ei371 MSn = 'LCIA_Define_ecoinvent_3_7' MDn = 'ecoinvent_3_7_Match' DN = 414 if ei_version_string == '_ei_3_8': tp = mf_Paths.data_path_ei38 MSn = 'LCIA_Define_ecoinvent_3_8' MDn = 'ecoinvent_3_8_Match' DN = 419 ScriptConfig = {} ScriptConfig['Current_UUID'] = str(uuid.uuid4()) ################################################################################### # Import data from masterfile # ################################################################################### # open master file MasterFile = openpyxl.load_workbook(os.path.join(mf_Paths.data_path_main,'Material_Footprint_LCIA_Master_V1.xlsx'),data_only=True) # read LCIA indicator method uuids MS = MasterFile[MSn] mf_uuid = [] wf_uuid = [] for m in range(10,22): mf_uuid.append(MS.cell(m, 5).value) for m in range(10,14): wf_uuid.append(MS.cell(m, 15).value) # read master data MD = MasterFile[MDn] dnames = [] duuid = [] dselect = [] dunit = [] drmi = [] dtmr = [] for m in range(2,2+DN): dnames.append( MD.cell(m, 2).value) duuid.append( MD.cell(m, 4).value) dselect.append(MD.cell(m,11).value) dunit.append( MD.cell(m,15).value) drmi.append( MD.cell(m,16).value) dtmr.append( MD.cell(m,21).value) # Tables with 1/0 flags to select individual factor for a given indicator MFSel = np.zeros((DN,6)) TFSel = np.zeros((DN,6)) WFSel = np.zeros((DN,4)) for m in range(2,2+DN): for n in range(24,30): MFSel[m-2,n-24] = MD.cell(m,n).value for n in range(30,36): TFSel[m-2,n-30] = MD.cell(m,n).value for n in range(36,40): 
WFSel[m-2,n-36] = MD.cell(m,n).value ################################################################################### # Sort data into json files # ################################################################################### #%% # loop over RMI files for m in range(0,6): f_in = os.path.join(tp,'lcia_categories',mf_uuid[m]+'.json') with open(f_in, 'r+') as f: thisd = json.load(f) del thisd['impactFactors'][0] # delete the two factors that are still there from copying the files del thisd['impactFactors'][0] # add new impact factors from master data for mli in range(0,DN): if dselect[mli] != 1 and MFSel[mli,m] == 1: # add this value as impact/characterisation factor CF = CF_generate(mli,drmi,dnames,duuid,dunit,ei_version_string) # add new CF to json file: thisd['impactFactors'].append(CF) # wrap up and save f.seek(0) # reset file position to the beginning. json.dump(thisd, f, indent=4) f.truncate() # remove remaining part f.close() # loop over TMR files for m in range(0,6): f_in = os.path.join(tp,'lcia_categories',mf_uuid[m+6]+'.json') with open(f_in, 'r+') as f: thisd = json.load(f) del thisd['impactFactors'][0] # delete the two factors that are still there from copying the files del thisd['impactFactors'][0] # add new impact factors from master data for mli in range(0,DN): if dselect[mli] != 1 and TFSel[mli,m] == 1: # add this value as impact/characterisation factor CF = CF_generate(mli,dtmr,dnames,duuid,dunit,ei_version_string) # add new CF to json file: thisd['impactFactors'].append(CF) # wrap up and save f.seek(0) # reset file position to the beginning. json.dump(thisd, f, indent=4) f.truncate() # remove remaining part f.close() # loop over WF files for m in range(0,4): f_in = os.path.join(tp,'lcia_categories',wf_uuid[m]+'.json') with open(f_in, 'r+') as f: thisd = json.load(f) del thisd['impactFactors'][0] # delete the two factors that are still there from copying the files del thisd['impactFactors'][0] # add new impact factors from master data for mli in range(0,DN): if dselect[mli] != 1 and WFSel[mli,m] == 1: # add this value as impact/characterisation factor CF = CF_generate(mli,drmi,dnames,duuid,dunit,ei_version_string) # add new CF to json file: thisd['impactFactors'].append(CF) # wrap up and save f.seek(0) # reset file position to the beginning. json.dump(thisd, f, indent=4) f.truncate() # remove remaining part f.close() #%% Sandbox # # # # The End # #
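# For reference, each entry that the loops above append to 'impactFactors'
# has roughly the shape below (mirroring CF_generate for a mass flow; the
# flow id, name and value are invented placeholders, not taken from the
# master file, while the unit and flow-property UUIDs are the ones defined
# in CF_generate above):
example_cf = {
    "@type": "ImpactFactor",
    "value": 3.5,
    "flow": {
        "@type": "Flow",
        "@id": "00000000-0000-0000-0000-000000000000",
        "name": "Example resource, in ground",
        "categoryPath": ["Elementary flows", "Resource", "in ground"],
        "flowType": "ELEMENTARY_FLOW",
        "refUnit": "kg",
    },
    "unit": {"@type": "Unit", "@id": "20aadc24-a391-41cf-b340-3e4529f44bde", "name": "kg"},
    "flowProperty": {
        "@type": "FlowProperty",
        "@id": "93a60a56-a3c8-11da-a746-0800200b9a66",
        "name": "Mass",
        "categoryPath": ["Technical flow properties"],
    },
}
print(json.dumps(example_cf, indent=4))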
python
""" pg_seldump -- package objects """ from .consts import VERSION as __version__ # noqa
python
#!/usr/bin/env python # -*- coding: utf-8 -*- from flask_migrate import Migrate, MigrateCommand from flask_script import Manager, Server from app.utils import get_env def create_app(): import os from flask import Flask from flask_sqlalchemy import SQLAlchemy def get_config(x=None): return { 'development': 'config.DevelopementConfig', 'dev': 'config.DevelopementConfig', 'testing': 'config.TestingConfig', 'default': 'config.ProductionConfig', 'production': 'config.ProductionConfig', 'prod': 'config.ProductionConfig' }.get(str(x).lower(), 'config.ProductionConfig') app = Flask(__name__.split('.')[0], static_folder='static', template_folder='templates', static_url_path='', instance_relative_config=True) app.config.from_object(get_config(get_env('FLASK_ENV', default='dev' if os.sys.platform == 'win32' else 'prod'))) app.config.from_pyfile('config.cfg', silent=True) print(app.secret_key) @app.teardown_request def teardown_request_func(error=None): """ This function will run after a request, regardless if an exception occurs or not. It's a good place to do some cleanup, such as closing any database connections. If an exception is raised, it will be passed to the function. You should so everything in your power to ensure this function does not fail, so liberal use of try/except blocks is recommended. """ if error: # Log the error app.logger.error(error) @app.route('/index', methods=['GET']) @app.route('/index.html', methods=['GET']) @app.route('/', methods=['GET']) def _root(error=None): from flask import redirect, url_for return redirect(url_for('api.root')) @app.after_request def jsonify_request(response): """JSONify the response. https://github.com/Fuyukai/OWAPI/blob/master/owapi/app.py#L208""" if response.headers.get('Content-Type', '').lower() == app.config['JSONIFY_MIMETYPE'].lower(): from flask import request import json if request.args.get('format', 'json') in ['json_pretty', 'pretty'] or app.config['JSONIFY_PRETTYPRINT_REGULAR']: from datetime import datetime, timedelta, timezone from email.utils import format_datetime response.set_data(json.dumps(response.get_json(), sort_keys=app.config['JSON_SORT_KEYS'], ensure_ascii=app.config['JSON_AS_ASCII'], indent=4, separators=(',', ': '))) response.headers['Cache-Control'] = 'public, max-age=300' response.headers['Expires'] = format_datetime((datetime.utcnow() + timedelta(seconds=300)).replace(tzinfo=timezone.utc), usegmt=True) return response def get_http_exception_handler(app): """Overrides the default http exception handler to return JSON.""" from functools import wraps handle_http_exception = app.handle_http_exception @wraps(handle_http_exception) def ret_val(error): """Generic exception handler for general exceptions""" if not app.env.lower().startswith('dev') and error.code == 404: from flask import redirect, url_for return redirect(url_for('api.root')) #from werkzeug.exceptions import HTTPException #if isinstance(e, HTTPException) and (500 <= e.code < 600): # return error if not hasattr(error, 'code'):# or isinstance(error, HTTPException): error.code = 500 from werkzeug.exceptions import default_exceptions if error.code in default_exceptions: # Returning directly as below results in missing Location header # on 301 errors which is useful for this test as it will fail to redirect. 
def get_http_error_code(error_code=500): return { 301: u'Moved Permanently', 302: u'Found', 303: u'See Other', 304: u'Not Modified', 400: u'Bad request', 401: u'Unauthorized', 403: u'Forbidden', 404: u'Resource not found', 405: u'Method not allowed', 408: u'Request Timeout', 409: u'Conflict', 410: u'Gone', 418: u'I am a teapot', 429: u'Too many requests', 500: u'Internal server error', 501: u'Not Implemented', 502: u'Bad Gateway', 503: u'Service unavailable', 504: u'Gateway Timeout' }.get(error_code, 500) from flask import jsonify if not hasattr(error, 'original_exception'): error.original_exception = error or None return jsonify(code=get_http_error_code(error.code), description=error.description, message=str(error.original_exception), error=error.code), error.code return handle_http_exception(error) return ret_val # Override the HTTP exception handler. app.config['TRAP_HTTP_EXCEPTIONS'] = True #TRAP_BAD_REQUEST_ERRORS = PROPAGATE_EXCEPTIONS = True app.handle_http_exception = get_http_exception_handler(app) from werkzeug.exceptions import default_exceptions #werkzeug import HTTP_STATUS_CODES for exc in default_exceptions: #exc in HTTPException.__subclasses__() | exc in HTTP_STATUS_CODES app.register_error_handler(exc, get_http_exception_handler(app)) app.register_error_handler(Exception, get_http_exception_handler(app)) #if request.path.startswith('/api/'): return jsonify_error(ex) #else: return ex import logging handler = logging.FileHandler('static/flask.log')#RotatingFileHandler('flask.log', maxBytes=1024 * 1024 * 100, backupCount=3) handler.setLevel(logging.DEBUG if app.config['DEBUG'] else logging.INFO) handler.setFormatter(logging.Formatter('[%(levelname)s|%(filename)s:%(lineno)s] ' '%(asctime)s %(message)s \r\n')) app.logger.addHandler(handler) # Blueprints from app import register register(app) return app, SQLAlchemy(app) app, db = create_app() migrate = Migrate(app, db) manager = Manager(app) manager.add_command('db', MigrateCommand) manager.add_command('debug', Server(host='127.0.0.1', port=8080, use_debugger=True)) if __name__ == '__main__': db.create_all() manager.run() app.run(debug=app.config['DEBUG'], use_reloader=app.config['DEBUG'], port=int(get_env('PORT', 5000)), host='0.0.0.0') #https://gist.github.com/rochacbruno/b1fe0ccab1a81804def887e8ed40da57 #https://gist.github.com/rochacbruno/e44c1f0f43e89093bf7ddba77ee9feef
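# Typical invocations for the manager wired up above (a sketch; the exact
# command set depends on the Flask-Script/Flask-Migrate versions this
# project pins):
#
#   python manage.py db init       # create the migrations folder
#   python manage.py db migrate    # autogenerate a migration from the models
#   python manage.py db upgrade    # apply pending migrations
#   python manage.py debug         # dev server on 127.0.0.1:8080 with debugger
#   python manage.py runserver     # Flask-Script's default server command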
python
# engine/interfaces.py # Copyright (C) 2005-2020 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Define core interfaces used by the engine system.""" from .. import util from ..sql.compiler import Compiled # noqa from ..sql.compiler import TypeCompiler # noqa class Dialect(object): """Define the behavior of a specific database and DB-API combination. Any aspect of metadata definition, SQL query generation, execution, result-set handling, or anything else which varies between databases is defined under the general category of the Dialect. The Dialect acts as a factory for other database-specific object implementations including ExecutionContext, Compiled, DefaultGenerator, and TypeEngine. .. note:: Third party dialects should not subclass :class:`.Dialect` directly. Instead, subclass :class:`.default.DefaultDialect` or descendant class. All dialects include the following attributes. There are many other attributes that may be supported as well: ``name`` identifying name for the dialect from a DBAPI-neutral point of view (i.e. 'sqlite') ``driver`` identifying name for the dialect's DBAPI ``positional`` True if the paramstyle for this Dialect is positional. ``paramstyle`` the paramstyle to be used (some DB-APIs support multiple paramstyles). ``encoding`` type of encoding to use for unicode, usually defaults to 'utf-8'. ``statement_compiler`` a :class:`.Compiled` class used to compile SQL statements ``ddl_compiler`` a :class:`.Compiled` class used to compile DDL statements ``server_version_info`` a tuple containing a version number for the DB backend in use. This value is only available for supporting dialects, and is typically populated during the initial connection to the database. ``default_schema_name`` the name of the default schema. This value is only available for supporting dialects, and is typically populated during the initial connection to the database. ``execution_ctx_cls`` a :class:`.ExecutionContext` class used to handle statement execution ``execute_sequence_format`` either the 'tuple' or 'list' type, depending on what cursor.execute() accepts for the second argument (they vary). ``preparer`` a :class:`~sqlalchemy.sql.compiler.IdentifierPreparer` class used to quote identifiers. ``supports_alter`` ``True`` if the database supports ``ALTER TABLE`` - used only for generating foreign key constraints in certain circumstances ``max_identifier_length`` The maximum length of identifier names. ``supports_sane_rowcount`` Indicate whether the dialect properly implements rowcount for ``UPDATE`` and ``DELETE`` statements. ``supports_sane_multi_rowcount`` Indicate whether the dialect properly implements rowcount for ``UPDATE`` and ``DELETE`` statements when executed via executemany. ``preexecute_autoincrement_sequences`` True if 'implicit' primary key functions must be executed separately in order to get their value. This is currently oriented towards PostgreSQL. ``implicit_returning`` use RETURNING or equivalent during INSERT execution in order to load newly generated primary keys and other column defaults in one execution, which are then available via inserted_primary_key. If an insert statement has returning() specified explicitly, the "implicit" functionality is not used and inserted_primary_key will not be available. 
``colspecs`` A dictionary of TypeEngine classes from sqlalchemy.types mapped to subclasses that are specific to the dialect class. This dictionary is class-level only and is not accessed from the dialect instance itself. ``supports_default_values`` Indicates if the construct ``INSERT INTO tablename DEFAULT VALUES`` is supported ``supports_sequences`` Indicates if the dialect supports CREATE SEQUENCE or similar. ``sequences_optional`` If True, indicates if the "optional" flag on the Sequence() construct should signal to not generate a CREATE SEQUENCE. Applies only to dialects that support sequences. Currently used only to allow PostgreSQL SERIAL to be used on a column that specifies Sequence() for usage on other backends. ``supports_native_enum`` Indicates if the dialect supports a native ENUM construct. This will prevent types.Enum from generating a CHECK constraint when that type is used. ``supports_native_boolean`` Indicates if the dialect supports a native boolean construct. This will prevent types.Boolean from generating a CHECK constraint when that type is used. ``dbapi_exception_translation_map`` A dictionary of names that will contain as values the names of pep-249 exceptions ("IntegrityError", "OperationalError", etc) keyed to alternate class names, to support the case where a DBAPI has exception classes that aren't named as they are referred to (e.g. IntegrityError = MyException). In the vast majority of cases this dictionary is empty. .. versionadded:: 1.0.5 """ _has_events = False def create_connect_args(self, url): """Build DB-API compatible connection arguments. Given a :class:`.URL` object, returns a tuple consisting of a ``(*args, **kwargs)`` suitable to send directly to the dbapi's connect function. The arguments are sent to the :meth:`.Dialect.connect` method which then runs the DBAPI-level ``connect()`` function. The method typically makes use of the :meth:`.URL.translate_connect_args` method in order to generate a dictionary of options. The default implementation is:: def create_connect_args(self, url): opts = url.translate_connect_args() opts.update(url.query) return [[], opts] :param url: a :class:`.URL` object :return: a tuple of ``(*args, **kwargs)`` which will be passed to the :meth:`.Dialect.connect` method. .. seealso:: :meth:`.URL.translate_connect_args` """ raise NotImplementedError() @classmethod def type_descriptor(cls, typeobj): """Transform a generic type to a dialect-specific type. Dialect classes will usually use the :func:`_types.adapt_type` function in the types module to accomplish this. The returned result is cached *per dialect class* so can contain no dialect-instance state. """ raise NotImplementedError() def initialize(self, connection): """Called during strategized creation of the dialect with a connection. Allows dialects to configure options based on server version info or other properties. The connection passed here is a SQLAlchemy Connection object, with full capabilities. The initialize() method of the base dialect should be called via super(). """ pass def reflecttable( self, connection, table, include_columns, exclude_columns, resolve_fks ): """Load table description from the database. Given a :class:`_engine.Connection` and a :class:`~sqlalchemy.schema.Table` object, reflect its columns and properties from the database. The implementation of this method is provided by :meth:`.DefaultDialect.reflecttable`, which makes use of :class:`_reflection.Inspector` to retrieve column information. 
Dialects should **not** seek to implement this method, and should instead implement individual schema inspection operations such as :meth:`.Dialect.get_columns`, :meth:`.Dialect.get_pk_constraint`, etc. """ raise NotImplementedError() def get_columns(self, connection, table_name, schema=None, **kw): """Return information about columns in `table_name`. Given a :class:`_engine.Connection`, a string `table_name`, and an optional string `schema`, return column information as a list of dictionaries with these keys: * ``name`` - the column's name * ``type`` - [sqlalchemy.types#TypeEngine] * ``nullable`` - boolean * ``default`` - the column's default value * ``autoincrement`` - boolean * ``sequence`` - a dictionary of the form {'name' : str, 'start' :int, 'increment': int, 'minvalue': int, 'maxvalue': int, 'nominvalue': bool, 'nomaxvalue': bool, 'cycle': bool, 'cache': int, 'order': bool} Additional column attributes may be present. """ raise NotImplementedError() @util.deprecated( "0.8", "The :meth:`.Dialect.get_primary_keys` method is deprecated and " "will be removed in a future release. Please refer to the " ":meth:`.Dialect.get_pk_constraint` method. ", ) def get_primary_keys(self, connection, table_name, schema=None, **kw): """Return information about primary keys in `table_name`.""" raise NotImplementedError() def get_pk_constraint(self, connection, table_name, schema=None, **kw): """Return information about the primary key constraint on table_name`. Given a :class:`_engine.Connection`, a string `table_name`, and an optional string `schema`, return primary key information as a dictionary with these keys: * ``constrained_columns`` - a list of column names that make up the primary key * ``name`` - optional name of the primary key constraint. """ raise NotImplementedError() def get_foreign_keys(self, connection, table_name, schema=None, **kw): """Return information about foreign_keys in `table_name`. Given a :class:`_engine.Connection`, a string `table_name`, and an optional string `schema`, return foreign key information as a list of dicts with these keys: * ``name`` - the constraint's name * ``constrained_columns`` - a list of column names that make up the foreign key * ``referred_schema`` - the name of the referred schema * ``referred_table`` - the name of the referred table * ``referred_columns`` - a list of column names in the referred table that correspond to constrained_columns """ raise NotImplementedError() def get_table_names(self, connection, schema=None, **kw): """Return a list of table names for `schema`.""" raise NotImplementedError() def get_temp_table_names(self, connection, schema=None, **kw): """Return a list of temporary table names on the given connection, if supported by the underlying backend. """ raise NotImplementedError() def get_view_names(self, connection, schema=None, **kw): """Return a list of all view names available in the database. :param schema: Optional, retrieve names from a non-default schema. """ raise NotImplementedError() def get_temp_view_names(self, connection, schema=None, **kw): """Return a list of temporary view names on the given connection, if supported by the underlying backend. """ raise NotImplementedError() def get_view_definition(self, connection, view_name, schema=None, **kw): """Return view definition. Given a :class:`_engine.Connection`, a string `view_name`, and an optional string `schema`, return the view definition. 
""" raise NotImplementedError() def get_indexes(self, connection, table_name, schema=None, **kw): """Return information about indexes in `table_name`. Given a :class:`_engine.Connection`, a string `table_name` and an optional string `schema`, return index information as a list of dictionaries with these keys: * ``name`` - the index's name * ``column_names`` - list of column names in order * ``unique`` - boolean """ raise NotImplementedError() def get_unique_constraints( self, connection, table_name, schema=None, **kw ): r"""Return information about unique constraints in `table_name`. Given a string `table_name` and an optional string `schema`, return unique constraint information as a list of dicts with these keys: * ``name`` - the unique constraint's name * ``column_names`` - list of column names in order * ``**kw`` - other options passed to the dialect's get_unique_constraints() method. .. versionadded:: 0.9.0 """ raise NotImplementedError() def get_check_constraints(self, connection, table_name, schema=None, **kw): r"""Return information about check constraints in `table_name`. Given a string `table_name` and an optional string `schema`, return check constraint information as a list of dicts with these keys: * ``name`` - the check constraint's name * ``sqltext`` - the check constraint's SQL expression * ``**kw`` - other options passed to the dialect's get_check_constraints() method. .. versionadded:: 1.1.0 """ raise NotImplementedError() def get_table_comment(self, connection, table_name, schema=None, **kw): r"""Return the "comment" for the table identified by `table_name`. Given a string `table_name` and an optional string `schema`, return table comment information as a dictionary with this key: text text of the comment Raises ``NotImplementedError`` for dialects that don't support comments. .. versionadded:: 1.2 """ raise NotImplementedError() def normalize_name(self, name): """convert the given name to lowercase if it is detected as case insensitive. This method is only used if the dialect defines requires_name_normalize=True. """ raise NotImplementedError() def denormalize_name(self, name): """convert the given name to a case insensitive identifier for the backend if it is an all-lowercase name. This method is only used if the dialect defines requires_name_normalize=True. """ raise NotImplementedError() def has_table(self, connection, table_name, schema=None): """Check the existence of a particular table in the database. Given a :class:`_engine.Connection` object and a string `table_name`, return True if the given table (possibly within the specified `schema`) exists in the database, False otherwise. """ raise NotImplementedError() def has_sequence(self, connection, sequence_name, schema=None): """Check the existence of a particular sequence in the database. Given a :class:`_engine.Connection` object and a string `sequence_name`, return True if the given sequence exists in the database, False otherwise. """ raise NotImplementedError() def _get_server_version_info(self, connection): """Retrieve the server version info from the given connection. This is used by the default implementation to populate the "server_version_info" attribute and is called exactly once upon first connect. """ raise NotImplementedError() def _get_default_schema_name(self, connection): """Return the string name of the currently selected schema from the given connection. This is used by the default implementation to populate the "default_schema_name" attribute and is called exactly once upon first connect. 
""" raise NotImplementedError() def do_begin(self, dbapi_connection): """Provide an implementation of ``connection.begin()``, given a DB-API connection. The DBAPI has no dedicated "begin" method and it is expected that transactions are implicit. This hook is provided for those DBAPIs that might need additional help in this area. Note that :meth:`.Dialect.do_begin` is not called unless a :class:`.Transaction` object is in use. The :meth:`.Dialect.do_autocommit` hook is provided for DBAPIs that need some extra commands emitted after a commit in order to enter the next transaction, when the SQLAlchemy :class:`_engine.Connection` is used in its default "autocommit" mode. :param dbapi_connection: a DBAPI connection, typically proxied within a :class:`.ConnectionFairy`. """ raise NotImplementedError() def do_rollback(self, dbapi_connection): """Provide an implementation of ``connection.rollback()``, given a DB-API connection. :param dbapi_connection: a DBAPI connection, typically proxied within a :class:`.ConnectionFairy`. """ raise NotImplementedError() def do_commit(self, dbapi_connection): """Provide an implementation of ``connection.commit()``, given a DB-API connection. :param dbapi_connection: a DBAPI connection, typically proxied within a :class:`.ConnectionFairy`. """ raise NotImplementedError() def do_close(self, dbapi_connection): """Provide an implementation of ``connection.close()``, given a DBAPI connection. This hook is called by the :class:`_pool.Pool` when a connection has been detached from the pool, or is being returned beyond the normal capacity of the pool. """ raise NotImplementedError() def create_xid(self): """Create a two-phase transaction ID. This id will be passed to do_begin_twophase(), do_rollback_twophase(), do_commit_twophase(). Its format is unspecified. """ raise NotImplementedError() def do_savepoint(self, connection, name): """Create a savepoint with the given name. :param connection: a :class:`_engine.Connection`. :param name: savepoint name. """ raise NotImplementedError() def do_rollback_to_savepoint(self, connection, name): """Rollback a connection to the named savepoint. :param connection: a :class:`_engine.Connection`. :param name: savepoint name. """ raise NotImplementedError() def do_release_savepoint(self, connection, name): """Release the named savepoint on a connection. :param connection: a :class:`_engine.Connection`. :param name: savepoint name. """ raise NotImplementedError() def do_begin_twophase(self, connection, xid): """Begin a two phase transaction on the given connection. :param connection: a :class:`_engine.Connection`. :param xid: xid """ raise NotImplementedError() def do_prepare_twophase(self, connection, xid): """Prepare a two phase transaction on the given connection. :param connection: a :class:`_engine.Connection`. :param xid: xid """ raise NotImplementedError() def do_rollback_twophase( self, connection, xid, is_prepared=True, recover=False ): """Rollback a two phase transaction on the given connection. :param connection: a :class:`_engine.Connection`. :param xid: xid :param is_prepared: whether or not :meth:`.TwoPhaseTransaction.prepare` was called. :param recover: if the recover flag was passed. """ raise NotImplementedError() def do_commit_twophase( self, connection, xid, is_prepared=True, recover=False ): """Commit a two phase transaction on the given connection. :param connection: a :class:`_engine.Connection`. :param xid: xid :param is_prepared: whether or not :meth:`.TwoPhaseTransaction.prepare` was called. 
:param recover: if the recover flag was passed. """ raise NotImplementedError() def do_recover_twophase(self, connection): """Recover list of uncommitted prepared two phase transaction identifiers on the given connection. :param connection: a :class:`_engine.Connection`. """ raise NotImplementedError() def do_executemany(self, cursor, statement, parameters, context=None): """Provide an implementation of ``cursor.executemany(statement, parameters)``. """ raise NotImplementedError() def do_execute(self, cursor, statement, parameters, context=None): """Provide an implementation of ``cursor.execute(statement, parameters)``. """ raise NotImplementedError() def do_execute_no_params( self, cursor, statement, parameters, context=None ): """Provide an implementation of ``cursor.execute(statement)``. The parameter collection should not be sent. """ raise NotImplementedError() def is_disconnect(self, e, connection, cursor): """Return True if the given DB-API error indicates an invalid connection. """ raise NotImplementedError() def connect(self, *cargs, **cparams): r"""Establish a connection using this dialect's DBAPI. The default implementation of this method is:: def connect(self, *cargs, **cparams): return self.dbapi.connect(*cargs, **cparams) The ``*cargs, **cparams`` parameters are generated directly from this dialect's :meth:`.Dialect.create_connect_args` method. This method may be used for dialects that need to perform programmatic per-connection steps when a new connection is procured from the DBAPI. :param \*cargs: positional parameters returned from the :meth:`.Dialect.create_connect_args` method :param \*\*cparams: keyword parameters returned from the :meth:`.Dialect.create_connect_args` method. :return: a DBAPI connection, typically from the :pep:`249` module level ``.connect()`` function. .. seealso:: :meth:`.Dialect.create_connect_args` :meth:`.Dialect.on_connect` """ def on_connect(self): """Return a callable which sets up a newly created DBAPI connection. The callable should accept a single argument "conn" which is the DBAPI connection itself. The inner callable has no return value. E.g.:: class MyDialect(default.DefaultDialect): # ... def on_connect(self): def do_on_connect(connection): connection.execute("SET SPECIAL FLAGS etc") return do_on_connect This is used to set dialect-wide per-connection options such as isolation modes, Unicode modes, etc. The "do_on_connect" callable is invoked by using the :meth:`_events.PoolEvents.first_connect` and :meth:`_events.PoolEvents.connect` event hooks, then unwrapping the DBAPI connection and passing it into the callable. The reason it is invoked for both events is so that any dialect-level initialization that occurs upon first connection, which also makes use of the :meth:`_events.PoolEvents.first_connect` method, will proceed after this hook has been called. This currently means the hook is in fact called twice for the very first connection in which a dialect creates; and once per connection afterwards. If None is returned, no event listener is generated. :return: a callable that accepts a single DBAPI connection as an argument, or None. .. seealso:: :meth:`.Dialect.connect` - allows the DBAPI ``connect()`` sequence itself to be controlled. """ return None def reset_isolation_level(self, dbapi_conn): """Given a DBAPI connection, revert its isolation to the default. 
Note that this is a dialect-level method which is used as part of the implementation of the :class:`_engine.Connection` and :class:`_engine.Engine` isolation level facilities; these APIs should be preferred for most typical use cases. .. seealso:: :meth:`_engine.Connection.get_isolation_level` - view current level :attr:`_engine.Connection.default_isolation_level` - view default level :paramref:`.Connection.execution_options.isolation_level` - set per :class:`_engine.Connection` isolation level :paramref:`_sa.create_engine.isolation_level` - set per :class:`_engine.Engine` isolation level """ raise NotImplementedError() def set_isolation_level(self, dbapi_conn, level): """Given a DBAPI connection, set its isolation level. Note that this is a dialect-level method which is used as part of the implementation of the :class:`_engine.Connection` and :class:`_engine.Engine` isolation level facilities; these APIs should be preferred for most typical use cases. .. seealso:: :meth:`_engine.Connection.get_isolation_level` - view current level :attr:`_engine.Connection.default_isolation_level` - view default level :paramref:`.Connection.execution_options.isolation_level` - set per :class:`_engine.Connection` isolation level :paramref:`_sa.create_engine.isolation_level` - set per :class:`_engine.Engine` isolation level """ raise NotImplementedError() def get_isolation_level(self, dbapi_conn): """Given a DBAPI connection, return its isolation level. When working with a :class:`_engine.Connection` object, the corresponding DBAPI connection may be procured using the :attr:`_engine.Connection.connection` accessor. Note that this is a dialect-level method which is used as part of the implementation of the :class:`_engine.Connection` and :class:`_engine.Engine` isolation level facilities; these APIs should be preferred for most typical use cases. .. seealso:: :meth:`_engine.Connection.get_isolation_level` - view current level :attr:`_engine.Connection.default_isolation_level` - view default level :paramref:`.Connection.execution_options.isolation_level` - set per :class:`_engine.Connection` isolation level :paramref:`_sa.create_engine.isolation_level` - set per :class:`_engine.Engine` isolation level """ raise NotImplementedError() @classmethod def get_dialect_cls(cls, url): """Given a URL, return the :class:`.Dialect` that will be used. This is a hook that allows an external plugin to provide functionality around an existing dialect, by allowing the plugin to be loaded from the url based on an entrypoint, and then the plugin returns the actual dialect to be used. By default this just returns the cls. .. versionadded:: 1.0.3 """ return cls @classmethod def load_provisioning(cls): """Set up the provision.py module for this dialect. For dialects that include a provision.py module that sets up provisioning followers, this method should initiate that process. A typical implementation would be:: @classmethod def load_provisioning(cls): __import__("mydialect.provision") The default method assumes a module named ``provision.py`` inside the owning package of the current dialect, based on the ``__module__`` attribute:: @classmethod def load_provisioning(cls): package = ".".join(cls.__module__.split(".")[0:-1]) try: __import__(package + ".provision") except ImportError: pass .. versionadded:: 1.3.14 """ @classmethod def engine_created(cls, engine): """A convenience hook called before returning the final :class:`_engine.Engine`. 
If the dialect returned a different class from the :meth:`.get_dialect_cls` method, then the hook is called on both classes, first on the dialect class returned by the :meth:`.get_dialect_cls` method and then on the class on which the method was called. The hook should be used by dialects and/or wrappers to apply special events to the engine or its components. In particular, it allows a dialect-wrapping class to apply dialect-level events. .. versionadded:: 1.0.3 """ pass class CreateEnginePlugin(object): """A set of hooks intended to augment the construction of an :class:`_engine.Engine` object based on entrypoint names in a URL. The purpose of :class:`.CreateEnginePlugin` is to allow third-party systems to apply engine, pool and dialect level event listeners without the need for the target application to be modified; instead, the plugin names can be added to the database URL. Target applications for :class:`.CreateEnginePlugin` include: * connection and SQL performance tools, e.g. which use events to track number of checkouts and/or time spent with statements * connectivity plugins such as proxies Plugins are registered using entry points in a similar way as that of dialects:: entry_points={ 'sqlalchemy.plugins': [ 'myplugin = myapp.plugins:MyPlugin' ] A plugin that uses the above names would be invoked from a database URL as in:: from sqlalchemy import create_engine engine = create_engine( "mysql+pymysql://scott:tiger@localhost/test?plugin=myplugin") Alternatively, the :paramref:`.create_engine.plugins" argument may be passed as a list to :func:`_sa.create_engine`:: engine = create_engine( "mysql+pymysql://scott:tiger@localhost/test", plugins=["myplugin"]) .. versionadded:: 1.2.3 plugin names can also be specified to :func:`_sa.create_engine` as a list The ``plugin`` argument supports multiple instances, so that a URL may specify multiple plugins; they are loaded in the order stated in the URL:: engine = create_engine( "mysql+pymysql://scott:tiger@localhost/" "test?plugin=plugin_one&plugin=plugin_twp&plugin=plugin_three") A plugin can receive additional arguments from the URL string as well as from the keyword arguments passed to :func:`_sa.create_engine`. The :class:`.URL` object and the keyword dictionary are passed to the constructor so that these arguments can be extracted from the url's :attr:`.URL.query` collection as well as from the dictionary:: class MyPlugin(CreateEnginePlugin): def __init__(self, url, kwargs): self.my_argument_one = url.query.pop('my_argument_one') self.my_argument_two = url.query.pop('my_argument_two') self.my_argument_three = kwargs.pop('my_argument_three', None) Arguments like those illustrated above would be consumed from the following:: from sqlalchemy import create_engine engine = create_engine( "mysql+pymysql://scott:tiger@localhost/" "test?plugin=myplugin&my_argument_one=foo&my_argument_two=bar", my_argument_three='bat') The URL and dictionary are used for subsequent setup of the engine as they are, so the plugin can modify their arguments in-place. Arguments that are only understood by the plugin should be popped or otherwise removed so that they aren't interpreted as erroneous arguments afterwards. When the engine creation process completes and produces the :class:`_engine.Engine` object, it is again passed to the plugin via the :meth:`.CreateEnginePlugin.engine_created` hook. In this hook, additional changes can be made to the engine, most typically involving setup of events (e.g. those defined in :ref:`core_event_toplevel`). .. 
versionadded:: 1.1 """ def __init__(self, url, kwargs): """Construct a new :class:`.CreateEnginePlugin`. The plugin object is instantiated individually for each call to :func:`_sa.create_engine`. A single :class:`_engine. Engine` will be passed to the :meth:`.CreateEnginePlugin.engine_created` method corresponding to this URL. :param url: the :class:`.URL` object. The plugin should inspect what it needs here as well as remove its custom arguments from the :attr:`.URL.query` collection. The URL can be modified in-place in any other way as well. :param kwargs: The keyword arguments passed to :func:`.create_engine`. The plugin can read and modify this dictionary in-place, to affect the ultimate arguments used to create the engine. It should remove its custom arguments from the dictionary as well. """ self.url = url def handle_dialect_kwargs(self, dialect_cls, dialect_args): """parse and modify dialect kwargs""" def handle_pool_kwargs(self, pool_cls, pool_args): """parse and modify pool kwargs""" def engine_created(self, engine): """Receive the :class:`_engine.Engine` object when it is fully constructed. The plugin may make additional changes to the engine, such as registering engine or connection pool events. """ class ExecutionContext(object): """A messenger object for a Dialect that corresponds to a single execution. ExecutionContext should have these data members: connection Connection object which can be freely used by default value generators to execute SQL. This Connection should reference the same underlying connection/transactional resources of root_connection. root_connection Connection object which is the source of this ExecutionContext. This Connection may have close_with_result=True set, in which case it can only be used once. dialect dialect which created this ExecutionContext. cursor DB-API cursor procured from the connection, compiled if passed to constructor, sqlalchemy.engine.base.Compiled object being executed, statement string version of the statement to be executed. Is either passed to the constructor, or must be created from the sql.Compiled object by the time pre_exec() has completed. parameters bind parameters passed to the execute() method. For compiled statements, this is a dictionary or list of dictionaries. For textual statements, it should be in a format suitable for the dialect's paramstyle (i.e. dict or list of dicts for non positional, list or list of lists/tuples for positional). isinsert True if the statement is an INSERT. isupdate True if the statement is an UPDATE. should_autocommit True if the statement is a "committable" statement. prefetch_cols a list of Column objects for which a client-side default was fired off. Applies to inserts and updates. postfetch_cols a list of Column objects for which a server-side default or inline SQL expression value was fired off. Applies to inserts and updates. """ exception = None """A DBAPI-level exception that was caught when this ExecutionContext attempted to execute a statement. This attribute is meaningful only within the :meth:`_events.ConnectionEvents.dbapi_error` event. .. versionadded:: 0.9.7 .. seealso:: :attr:`.ExecutionContext.is_disconnect` :meth:`_events.ConnectionEvents.dbapi_error` """ is_disconnect = None """Boolean flag set to True or False when a DBAPI-level exception is caught when this ExecutionContext attempted to execute a statement. This attribute is meaningful only within the :meth:`_events.ConnectionEvents.dbapi_error` event. .. versionadded:: 0.9.7 .. 
seealso:: :attr:`.ExecutionContext.exception` :meth:`_events.ConnectionEvents.dbapi_error` """ def create_cursor(self): """Return a new cursor generated from this ExecutionContext's connection. Some dialects may wish to change the behavior of connection.cursor(), such as postgresql which may return a PG "server side" cursor. """ raise NotImplementedError() def pre_exec(self): """Called before an execution of a compiled statement. If a compiled statement was passed to this ExecutionContext, the `statement` and `parameters` datamembers must be initialized after this statement is complete. """ raise NotImplementedError() def post_exec(self): """Called after the execution of a compiled statement. If a compiled statement was passed to this ExecutionContext, the `last_insert_ids`, `last_inserted_params`, etc. datamembers should be available after this method completes. """ raise NotImplementedError() def result(self): """Return a result object corresponding to this ExecutionContext. Returns a ResultProxy. """ raise NotImplementedError() def handle_dbapi_exception(self, e): """Receive a DBAPI exception which occurred upon execute, result fetch, etc. """ raise NotImplementedError() def should_autocommit_text(self, statement): """Parse the given textual statement and return True if it refers to a "committable" statement """ raise NotImplementedError() def lastrow_has_defaults(self): """Return True if the last INSERT or UPDATE row contained inlined or database-side defaults. """ raise NotImplementedError() def get_rowcount(self): """Return the DBAPI ``cursor.rowcount`` value, or in some cases an interpreted value. See :attr:`_engine.ResultProxy.rowcount` for details on this. """ raise NotImplementedError() class Connectable(object): """Interface for an object which supports execution of SQL constructs. The two implementations of :class:`.Connectable` are :class:`_engine.Connection` and :class:`_engine.Engine`. Connectable must also implement the 'dialect' member which references a :class:`.Dialect` instance. """ def connect(self, **kwargs): """Return a :class:`_engine.Connection` object. Depending on context, this may be ``self`` if this object is already an instance of :class:`_engine.Connection`, or a newly procured :class:`_engine.Connection` if this object is an instance of :class:`_engine.Engine`. """ engine = None """The :class:`_engine.Engine` instance referred to by this :class:`.Connectable`. May be ``self`` if this is already an :class:`_engine.Engine`. """ @util.deprecated( "1.3", "The :meth:`_engine.Engine.contextual_connect` and " ":meth:`_engine.Connection.contextual_connect` methods are " "deprecated. This " "method is an artifact of the threadlocal engine strategy which is " "also to be deprecated. For explicit connections from an " ":class:`_engine.Engine`, use the :meth:`_engine.Engine.connect` " "method.", ) def contextual_connect(self, *arg, **kw): """Return a :class:`_engine.Connection` object which may be part of an ongoing context. Depending on context, this may be ``self`` if this object is already an instance of :class:`_engine.Connection`, or a newly procured :class:`_engine.Connection` if this object is an instance of :class:`_engine.Engine`. """ return self._contextual_connect(*arg, **kw) def _contextual_connect(self): raise NotImplementedError() @util.deprecated( "0.7", "The :meth:`.Connectable.create` method is deprecated and will be " "removed in a future release. 
Please use the ``.create()`` method " "on specific schema objects to emit DDL sequences, including " ":meth:`_schema.Table.create`, :meth:`.Index.create`, and " ":meth:`_schema.MetaData.create_all`.", ) def create(self, entity, **kwargs): """Emit CREATE statements for the given schema entity.""" raise NotImplementedError() @util.deprecated( "0.7", "The :meth:`.Connectable.drop` method is deprecated and will be " "removed in a future release. Please use the ``.drop()`` method " "on specific schema objects to emit DDL sequences, including " ":meth:`_schema.Table.drop`, :meth:`.Index.drop`, and " ":meth:`_schema.MetaData.drop_all`.", ) def drop(self, entity, **kwargs): """Emit DROP statements for the given schema entity.""" raise NotImplementedError() def execute(self, object_, *multiparams, **params): """Executes the given construct and returns a """ """:class:`_engine.ResultProxy`.""" raise NotImplementedError() def scalar(self, object_, *multiparams, **params): """Executes and returns the first column of the first row. The underlying cursor is closed after execution. """ raise NotImplementedError() def _run_visitor(self, visitorcallable, element, **kwargs): raise NotImplementedError() def _execute_clauseelement(self, elem, multiparams=None, params=None): raise NotImplementedError() class ExceptionContext(object): """Encapsulate information about an error condition in progress. This object exists solely to be passed to the :meth:`_events.ConnectionEvents.handle_error` event, supporting an interface that can be extended without backwards-incompatibility. .. versionadded:: 0.9.7 """ connection = None """The :class:`_engine.Connection` in use during the exception. This member is present, except in the case of a failure when first connecting. .. seealso:: :attr:`.ExceptionContext.engine` """ engine = None """The :class:`_engine.Engine` in use during the exception. This member should always be present, even in the case of a failure when first connecting. .. versionadded:: 1.0.0 """ cursor = None """The DBAPI cursor object. May be None. """ statement = None """String SQL statement that was emitted directly to the DBAPI. May be None. """ parameters = None """Parameter collection that was emitted directly to the DBAPI. May be None. """ original_exception = None """The exception object which was caught. This member is always present. """ sqlalchemy_exception = None """The :class:`sqlalchemy.exc.StatementError` which wraps the original, and will be raised if exception handling is not circumvented by the event. May be None, as not all exception types are wrapped by SQLAlchemy. For DBAPI-level exceptions that subclass the dbapi's Error class, this field will always be present. """ chained_exception = None """The exception that was returned by the previous handler in the exception chain, if any. If present, this exception will be the one ultimately raised by SQLAlchemy unless a subsequent handler replaces it. May be None. """ execution_context = None """The :class:`.ExecutionContext` corresponding to the execution operation in progress. This is present for statement execution operations, but not for operations such as transaction begin/end. It also is not present when the exception was raised before the :class:`.ExecutionContext` could be constructed. 
Note that the :attr:`.ExceptionContext.statement` and :attr:`.ExceptionContext.parameters` members may represent a different value than that of the :class:`.ExecutionContext`, potentially in the case where a :meth:`_events.ConnectionEvents.before_cursor_execute` event or similar modified the statement/parameters to be sent. May be None. """ is_disconnect = None """Represent whether the exception as occurred represents a "disconnect" condition. This flag will always be True or False within the scope of the :meth:`_events.ConnectionEvents.handle_error` handler. SQLAlchemy will defer to this flag in order to determine whether or not the connection should be invalidated subsequently. That is, by assigning to this flag, a "disconnect" event which then results in a connection and pool invalidation can be invoked or prevented by changing this flag. .. note:: The pool "pre_ping" handler enabled using the :paramref:`_sa.create_engine.pool_pre_ping` parameter does **not** consult this event before deciding if the "ping" returned false, as opposed to receiving an unhandled error. For this use case, the :ref:`legacy recipe based on engine_connect() may be used <pool_disconnects_pessimistic_custom>`. A future API allow more comprehensive customization of the "disconnect" detection mechanism across all functions. """ invalidate_pool_on_disconnect = True """Represent whether all connections in the pool should be invalidated when a "disconnect" condition is in effect. Setting this flag to False within the scope of the :meth:`_events.ConnectionEvents.handle_error` event will have the effect such that the full collection of connections in the pool will not be invalidated during a disconnect; only the current connection that is the subject of the error will actually be invalidated. The purpose of this flag is for custom disconnect-handling schemes where the invalidation of other connections in the pool is to be performed based on other conditions, or even on a per-connection basis. .. versionadded:: 1.0.3 """
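
# ---------------------------------------------------------------------------
# Hedged example (illustrative only, not part of interfaces.py): a minimal
# CreateEnginePlugin that uses only the hooks documented above
# (``__init__(url, kwargs)`` and ``engine_created()``) together with the
# ``url.query.pop()`` pattern shown in the class docstring. The plugin name,
# URL argument and event listener are invented for this sketch; a real plugin
# would be registered under the ``sqlalchemy.plugins`` entry point as
# described above.
class _StatementCounterPlugin(CreateEnginePlugin):
    """Count statements executed by engines created with this plugin."""

    def __init__(self, url, kwargs):
        # consume plugin-specific URL arguments so the dialect never sees them
        self.verbose = url.query.pop("count_verbose", None) is not None
        self.count = 0
        super(_StatementCounterPlugin, self).__init__(url, kwargs)

    def engine_created(self, engine):
        from sqlalchemy import event  # local import, keeps the sketch self-contained

        @event.listens_for(engine, "before_cursor_execute")
        def _count(conn, cursor, statement, parameters, context, executemany):
            self.count += 1
            if self.verbose:
                print("statement #%d" % self.count)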
python
import tensorflow as tf import numpy as np def set_gpu(config_str): import os os.environ["CUDA_VISIBLE_DEVICES"] = config_str ########################################################### #define weight and bias initialization def weight(shape,data=None,dtype=None): if dtype is None: dtype = tf.float32 if data is not None: w = tf.get_variable('weight',shape,initializer=tf.constant_initializer(data),dtype=dtype) else: w = tf.get_variable('weight',shape,initializer=tf.contrib.layers.xavier_initializer(),dtype=dtype) return w def weight_conv(shape,data=None,dtype=None): if dtype is None: dtype = tf.float32 if data is not None: k = tf.get_variable('kernel',shape,initializer=tf.constant_initializer(data),dtype=dtype) else: k = tf.get_variable('kernel',shape,initializer=tf.contrib.layers.xavier_initializer_conv2d(),dtype=dtype) return k def bias(shape,name='bias',value=0.0,dtype=None,trainable=True): if dtype is None: dtype = tf.float32 b = tf.get_variable(name=name,shape=shape,initializer=tf.constant_initializer(value),dtype=dtype,trainable=trainable) return b ########################################################### #define layer class class Layer(tf.contrib.checkpoint.Checkpointable): def __init__(self, name): # template for layer definition self.initialized = False self.variables = [] if not name is None: with tf.variable_scope(name): if not tf.executing_eagerly(): self._parse_args() self._initialize() self.initialized = True self.output = self._deploy() else: if not tf.executing_eagerly(): self._parse_args() self._initialize() self.initialized = True self.output = self._deploy() def _add_variable(self,var): # if not hasattr(self,'variables'): # self.variables = [] self.variables.append(var) def _initialize(self): pass def _parse_args(self): pass def __call__(self, x): self.x = tf.convert_to_tensor(x) if not self.initialized: self._parse_args() self._initialize() self.initialized = True return self._deploy() ########################################################### #define basic layers class conv2D(Layer): def __init__(self,size,outchn,x=None,name=None,stride=1,pad='SAME',usebias=True,values=None,kernel_data=None,bias_data=None,dilation_rate=1,weight_norm=False): self.x = x self.size = size self.outchn = outchn self.name = name self.stride = stride self.pad = pad self.usebias = usebias if values is None: self.kernel_data = None self.bias_data = None else: self.kernel_data = values[0] self.bias_data = values[1] self.dilation_rate = dilation_rate self.weight_norm = weight_norm super(conv2D, self).__init__(name) def _parse_args(self): # set size inchannel = self.x.get_shape().as_list()[-1] if isinstance(self.size,list): self.size = [self.size[0],self.size[1],inchannel,self.outchn] else: self.size = [self.size, self.size, inchannel, self.outchn] # set stride if isinstance(self.stride,list): self.stride = [1,self.stride[0],self.stride[1],1] else: self.stride = [1,self.stride, self.stride, 1] # set dilation if isinstance(self.dilation_rate,list): self.dilation_rate = [1,self.dilation_rate[0],self.dilation_rate[1],1] else: self.dilation_rate = [1,self.dilation_rate,self.dilation_rate,1] def _initialize(self): # this will enlarge ckpt size. 
(at first time) if self.kernel_data is not None: self.W = weight_conv(self.kernel_data.shape, self.kernel_data) else: self.W = weight_conv(self.size) if self.weight_norm: print('Enable weight norm') self.W = self.W.initialized_value() self.W = tf.nn.l2_normalize(self.W, [0,1,2]) print('Initialize weight norm') x_init = tf.nn.conv2d(self.x,self.W,stride,pad,dilations=dilation_rate) m_init, v_init = tf.nn.moments(x_init,[0,1,2]) s_init = 1. / tf.sqrt(v_init + 1e-8) s = tf.get_variable('weight_scale',dtype=tf.float32,initializer=s_init) self.S = s.initialized_value() self.S = tf.reshape(self.S,[1,1,1,outchn]) self.W = self.S *self.W self._add_variable(self.S) self._add_variable(self.W) # if self.usebias: if self.bias_data is not None: self.b = bias([self.outchn], value=self.bias_data) else: self.b = bias([self.outchn]) self._add_variable(self.b) def _deploy(self): out = tf.nn.conv2d(self.x,self.W,self.stride,self.pad,dilations=self.dilation_rate) if self.usebias: out = tf.nn.bias_add(out,self.b) return out class conv1D(Layer): def __init__(self,size,outchn,x=None,name=None,stride=1,pad='SAME',usebias=True,values=None,kernel_data=None,bias_data=None,dilation_rate=1,weight_norm=False): self.x = x self.size = size self.outchn = outchn self.name = name self.stride = stride self.pad = pad self.usebias = usebias if values is None: self.kernel_data = None self.bias_data = None else: self.kernel_data = values[0] self.bias_data = values[1] self.dilation_rate = dilation_rate self.weight_norm = weight_norm super(conv1D, self).__init__(name) def _parse_args(self): # set size inchannel = self.x.get_shape().as_list()[-1] self.size = [1, self.size, inchannel, self.outchn] # set stride self.stride = [1,1, self.stride, 1] # set dilation self.dilation_rate = [1,1,self.dilation_rate,1] def _initialize(self): # this will enlarge ckpt size. (at first time) if self.kernel_data is not None: self.W = weight_conv(self.kernel_data.shape, self.kernel_data) else: self.W = weight_conv(self.size) if self.weight_norm: print('Enable weight norm') self.W = self.W.initialized_value() self.W = tf.nn.l2_normalize(self.W, [0,1,2]) print('Initialize weight norm') x_init = tf.nn.conv2d(self.x,self.W,stride,pad,dilations=dilation_rate) m_init, v_init = tf.nn.moments(x_init,[0,1,2]) s_init = 1. 
/ tf.sqrt(v_init + 1e-8) s = tf.get_variable('weight_scale',dtype=tf.float32,initializer=s_init) self.S = s.initialized_value() self.S = tf.reshape(self.S,[1,1,1,outchn]) self.W = self.S *self.W self._add_variable(self.S) self._add_variable(self.W) # if self.usebias: if self.bias_data is not None: self.b = bias([self.outchn], value=self.bias_data) else: self.b = bias([self.outchn]) self._add_variable(self.b) def _deploy(self): self.x = tf.expand_dims(self.x, axis=1) out = tf.nn.conv2d(self.x,self.W,self.stride,self.pad,dilations=self.dilation_rate) if self.usebias: out = tf.nn.bias_add(out,self.b) out = tf.squeeze(out, axis=1) return out class maxpoolLayer(Layer): def __init__(self,size,x=None,stride=None,name=None,pad='SAME'): self.x = x self.name = name self.size = size self.stride = stride self.pad = pad super(maxpoolLayer, self).__init__(name) def _parse_args(self): if isinstance(self.size, list): if len(self.size)==2: self.size = [1, self.size[0], self.size[1], 1] elif isinstance(self.size, int): self.size = [1, self.size, self.size, 1] if not self.stride: self.stride = self.size elif isinstance(self.stride, list): if len(self.stride)==2: self.stride = [1,self.stride[0],self.stride[1],1] elif isinstance(self.stride, int): self.stride = [1, self.stride, self.stride, 1] def _deploy(self): return tf.nn.max_pool(self.x, ksize=self.size, strides=self.stride, padding=self.pad) class activation(Layer): def __init__(self, param, x=None, name=None, **kwarg): self.x = x self.param = param self.name = name self.kwarg = kwarg super(activation, self).__init__(name) def _deploy(self): if self.param == 0: res = tf.nn.relu(self.x) elif self.param == 1: if 'leaky' in self.kwarg: leaky = self.kwarg['leaky'] else: leaky = 0.2 res = tf.maximum(self.x,self.x*leaky) elif self.param == 2: res = tf.nn.elu(self.x) elif self.param == 3: res = tf.tanh(self.x) elif self.param == 4: shape = self.x.get_shape().as_list() res = tf.reshape(self.x,[-1,shape[1],shape[2],2,shape[-1]//2]) # potential bug in conv_net res = tf.reduce_max(res,axis=[3]) elif self.param == 5: shape = self.x.get_shape().as_list() res = tf.reduce_max(tf.reshape(self.x,[-1,2,shape[-1]//2]),axis=[1]) elif self.param == 6: res = tf.sigmoid(self.x) else: res = self.x return res class fcLayer(Layer): def __init__(self, outsize, usebias=True, x=None, values=None, name=None): self.x = x self.outsize = outsize self.usebias = usebias self.name = name self.values = values super(fcLayer, self).__init__(name) def _initialize(self): insize = self.x.get_shape().as_list()[-1] if self.values is not None: self.W = weight([insize, self.outsize], data=self.values[0]) else: self.W = weight([insize, self.outsize]) self._add_variable(self.W) if self.usebias: if self.values is not None: self.b = bias([self.outsize], value=self.values[1]) else: self.b = bias([self.outsize]) self._add_variable(self.b) def _deploy(self): res = tf.matmul(self.x, self.W) if self.usebias: res = tf.nn.bias_add(res, self.b) return res class batch_norm_graph(Layer): def __init__(self, training, epsilon, x=None, name=None): assert (not tf.executing_eagerly()),'batch_norm_graph can only run in graph mode' self.x = x self.training = training self.epsilon = epsilon self.name = name super(batch_norm_graph, self).__init__(name) def _deploy(self): # will modify this to lower api in later version if not self.epsilon is None: return tf.layers.batch_normalization(self.x,training=self.training,name=self.name,epsilon=self.epsilon) return 
tf.layers.batch_normalization(self.x,training=self.training,name=self.name) class batch_norm(Layer): def __init__(self, decay=0.01, epsilon=0.001, is_training=True, name=None, values=None): assert tf.executing_eagerly(),'batch_norm can only run in graph mode' self.name = name self.decay = decay self.epsilon = epsilon self.is_training = is_training self.values = values super(batch_norm, self).__init__(name) def _initialize(self): shape = self.x.get_shape().as_list()[-1] if self.values is None: self.moving_average = bias([shape],name='moving_average',value=0.0,trainable=False) self.variance = bias([shape],name='variance',value=1.0,trainable=False) self.gamma = bias([shape],name='gamma',value=1.0,trainable=True) self.beta = bias([shape],name='beta',value=0.0,trainable=True) else: self.moving_average = bias([shape],name='moving_average',value=self.values[0],trainable=False) self.variance = bias([shape],name='variance',value=self.values[1],trainable=False) self.gamma = bias([shape],name='gamma',value=self.values[2],trainable=True) self.beta = bias([shape],name='beta',value=self.values[3],trainable=True) def update(self,variable,value): delta = (variable - value) * self.decay variable.assign_sub(delta) def _deploy(self): inp_dim_num = len(self.x.get_shape().as_list()) if inp_dim_num==3: self.x = tf.expand_dims(self.x, axis=1) if self.is_training: res, mean, var = tf.nn.fused_batch_norm(self.x, self.gamma, self.beta, None, None, self.epsilon, is_training=self.is_training) self.update(self.moving_average, mean) self.update(self.variance, var) else: res, mean, var = tf.nn.fused_batch_norm(self.x, self.gamma, self.beta, self.moving_average, self.variance, self.epsilon, is_training=self.is_training) if inp_dim_num==3: res = tf.squeeze(res , axis=1) return res class deconv2D(Layer): def __init__(self,size,outchn,x=None,stride=1,usebias=True,pad='SAME',name=None): self.x = x self.size = size self.outchn = outchn self.name = name self.stride = stride self.pad = pad self.usebias = usebias super(deconv2D, self).__init__(name) def _parse_args(self): inp_size = self.x.get_shape().as_list() inchannel = inp_size[-1] if isinstance(self.size,list): self.size = [self.size[0],self.size[1],self.outchn,inchannel] else: self.size = [self.size, self.size, self.outchn, inchannel] if isinstance(self.stride, list): if len(self.stride)==2: self.stride = [1,self.stride[0],self.stride[1],1] elif isinstance(self.stride, int): self.stride = [1, self.stride, self.stride, 1] # infer the output shape if self.pad == 'SAME': self.output_shape = [tf.shape(self.x)[0], tf.shape(self.x)[1]*self.stride[1], tf.shape(self.x)[2]*self.stride[2], self.outchn] else: self.output_shape = [tf.shape(self.x)[0], tf.shape(self.x)[1]*self.stride[1]+self.size[0]-self.stride[1], tf.shape(self.x)[2]*self.stride[2]+self.size[1]-self.stride[2], self.outchn] def _initialize(self): self.W = weight_conv(self.size) self._add_variable(self.W) if self.usebias: self.b = bias([self.outchn]) self._add_variable(self.b) def _deploy(self): res = tf.nn.conv2d_transpose(self.x, self.W, self.output_shape, self.stride, padding=self.pad) if self.usebias: res = tf.nn.bias_add(res, self.b) return res class flatten(Layer): def __init__(self, x=None, name=None): self.x = x super(flatten, self).__init__(name) def _deploy(self): shape = self.x.get_shape().as_list() num = 1 for k in shape[1:]: num *= k res = tf.reshape(self.x, [-1, num]) return res ####### Functional layer ####### @tf.custom_gradient def gradient_reverse(x): def grad(dy): return -dy return x, grad
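
# ---------------------------------------------------------------------------
# Hedged usage sketch (illustrative only, not part of the original module):
# how the Layer wrappers above are typically chained in graph mode, where
# each layer receives its input through the `x=` argument and exposes the
# resulting tensor on `.output`. Shapes and layer names are invented here;
# this assumes TF 1.x, matching the tf.contrib / tf.get_variable APIs used above.
def _example_tiny_net():
    graph = tf.Graph()
    with graph.as_default():
        images = tf.placeholder(tf.float32, [None, 32, 32, 3], name='images')
        net = conv2D(3, 16, x=images, name='conv1').output    # 3x3 conv, 16 output channels
        net = activation(0, x=net, name='relu1').output        # param 0 selects ReLU
        net = maxpoolLayer(2, x=net, name='pool1').output      # 2x2 max pooling
        net = flatten(x=net, name='flatten1').output
        logits = fcLayer(10, x=net, name='fc1').output          # 10-way classifier head
    return graph, images, logits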
python
# 2020.09.06
# Problem Statement:
# https://leetcode.com/problems/text-justification/

from typing import List  # needed for the type hints below when run outside LeetCode


class Solution:
    def modified(self, temp, maxWidth, count_char, count_word, count_char_list):
        # corner case: a line holding only one word is simply left-justified
        if count_word == 1:
            temp = temp + " "*(maxWidth-len(temp))
            return temp
        # space_amount stores, for each gap between words, how many spaces it should get
        space_amount = []
        # space_total is the total number of spaces that must be distributed
        space_total = maxWidth - count_char
        # a and b are working copies used for the calculation
        a, b = space_total, count_word
        # fill in space_amount, giving the extra spaces to the leftmost gaps
        for i in range(0, count_word-1):
            if a % (b-1) == 0:
                space_amount.append(int(a//(b-1)))
                a = a-int(a//(b-1))
                b = b-1
            else:
                space_amount.append(int(a//(b-1)+1))
                a = a-int(a//(b-1)+1)
                b = b-1
        # insert the computed runs of spaces into temp
        index = 0
        for i in range(0, len(count_char_list)-1):
            index = index + count_char_list[i]
            temp = temp[: index] + " "*space_amount[i] + temp[index+1: ]
            index = index + space_amount[i]
        return temp

    def fullJustify(self, words: List[str], maxWidth: int) -> List[str]:
        # initialize answer to return and initialize temp
        answer = []
        temp = ""
        # count_char stores, for each line, how many characters (excluding spaces) it holds
        # count_word stores, for each line, how many words fit on it
        # count_char_list stores the word-length distribution of the current line
        count_char = 0
        count_word = 0
        count_char_list = []
        # greedy packing: fit as many words per line as possible, using single spaces for now
        for i in range(0, len(words)):
            if i == 0:
                temp = words[i]
                count_char = len(words[i])
                count_word = 1
                count_char_list.append(len(words[i]))
            else:
                if len(temp) + len(words[i]) < maxWidth:
                    temp = temp + " " + words[i]
                    count_char = count_char + len(words[i])
                    count_word = count_word + 1
                    count_char_list.append(len(words[i]))
                else:
                    # redistribute the spaces in the finished line
                    temp = self.modified(temp, maxWidth, count_char, count_word, count_char_list)
                    # start a new line
                    answer.append(temp)
                    temp = words[i]
                    # reset the per-line counters
                    count_char = len(words[i])
                    count_word = 1
                    count_char_list = []
                    count_char_list.append(len(words[i]))
        # the last line is left-justified and padded with spaces
        temp = temp + " "*(maxWidth-len(temp))
        answer.append(temp)
        return answer
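
# Hedged usage example (not part of the original submission): the standard
# LeetCode test case for this problem, run against the solution above.
if __name__ == "__main__":
    demo_words = ["This", "is", "an", "example", "of", "text", "justification."]
    for line in Solution().fullJustify(demo_words, 16):
        print(repr(line))
    # Expected output (per the problem statement):
    # 'This    is    an'
    # 'example  of text'
    # 'justification.  '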
python
import sys, os, json
import SteamUtil, ServiceUtil


class AutoUpdater():
    def __init__(self, config):
        self.APP_ID = config["app_id"]
        self.VERSION_FILE = config["version_file"]
        self.STEAM_API_KEY = config["steam_api_key"]
        self.STEAM_DIR = config["steamcmd_location"]
        self.STEAMCMD_EXE = config["steamcmd_exe"]
        self.GAME_DIR = config["game_dir"]
        self.GAME_PROCESS_NAME = config["process_name"]
        self.GAME_EXE = config["game_exe"]
        self.GAME_NAME = config["game_name"]
        self.CreateSteamManager()

    def CreateSteamManager(self):
        self.steam = SteamUtil.SteamManager(self.STEAM_API_KEY, self.APP_ID, self.GAME_DIR + self.VERSION_FILE)

    def GetGameServerVersion(self):
        v = self.steam.GetServerVersion()
        if(v):
            printStr = "Detected version {}"
        else:
            v = 0
            printStr = "Error detecting version, using {}"
        print(printStr.format(v))

    def CheckGameServerVersion(self):
        print("Getting latest version of {}...".format(self.GAME_NAME))
        v = self.steam.CheckStatus()
        if(v):
            print("Up to date")
            return False
        elif(v == False):
            print("Game not up to date")
            return True
        else:
            print("Error contacting steam api server")
            return None

    def KillGameServer(self):
        print("Checking for instances of {}...".format(self.GAME_PROCESS_NAME))
        p = ServiceUtil.ProcessUtil.GetProcessByName(self.GAME_PROCESS_NAME)
        if(p):
            print("Process found with id {}, killing...".format(str(p.pid)))
            ServiceUtil.ProcessUtil.KillProcess(p)
            print("Done")
        else:
            print("None found")

    def UpdateGameServer(self):
        print("Starting steamcmd to check for updates...")
        p = ServiceUtil.ProcessUtil.RunProcess(self.STEAM_DIR, self.STEAMCMD_EXE, True)
        print("Done")

    def StartGameServer(self):
        print("Starting {}...".format(self.GAME_NAME))
        p = ServiceUtil.ProcessUtil.RunProcess(self.GAME_DIR, self.GAME_EXE)
        print("Done")

    def CheckGame(self):
        print("Checking {}...".format(self.GAME_NAME))
        self.GetGameServerVersion()
        if(self.CheckGameServerVersion()):
            self.KillGameServer()
            self.UpdateGameServer()
            self.StartGameServer()
        print("Done checking {} for updates\n\n".format(self.GAME_NAME))


def GetConfig():
    print("Reading in config file...")
    dir = os.path.dirname(os.path.realpath(sys.argv[0]))
    with open(os.path.join(dir, "config.json")) as f:
        print("Done\n")
        return json.load(f)


if __name__ == "__main__":
    config = GetConfig()
    for game in config["Games"]:
        # fall back to the top-level defaults only when the game entry omits them
        if "steam_api_key" not in game:
            game["steam_api_key"] = config["steam_api_key"]
        if "version_file" not in game:
            game["version_file"] = config["version_file"]
        if "steamcmd_location" not in game:
            game["steamcmd_location"] = config["steamcmd_location"]
        a = AutoUpdater(game)
        a.CheckGame()
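
# ---------------------------------------------------------------------------
# Hedged example (illustrative only, not part of the original script): a
# config.json shape that supplies every key read above. All values are
# placeholders; the real file is expected to sit next to this script and is
# loaded by GetConfig().
EXAMPLE_CONFIG = {
    "steam_api_key": "<steam web api key>",
    "version_file": "version.txt",
    "steamcmd_location": "C:/steamcmd/",
    "Games": [
        {
            "app_id": "<dedicated server app id>",
            "game_name": "Example Game",
            "game_dir": "C:/servers/example/",
            "game_exe": "ExampleServer.exe",
            "process_name": "ExampleServer",
            "steamcmd_exe": "steamcmd.exe"
        }
    ]
}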
python
# Copyright 2016-2021 The Van Valen Lab at the California Institute of # Technology (Caltech), with support from the Paul Allen Family Foundation, # Google, & National Institutes of Health (NIH) under Grant U24CA224309-01. # All rights reserved. # # Licensed under a modified Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.github.com/vanvalenlab/deepcell-toolbox/LICENSE # # The Work provided may be used for non-commercial academic purposes only. # For any other use of the Work, including commercial use, please contact: # vanvalenlab@gmail.com # # Neither the name of Caltech nor the names of its contributors may be used # to endorse or promote products derived from this software without specific # prior written permission. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """Functions for pre- and post-processing image data""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import warnings import numpy as np import scipy.ndimage as nd from skimage.feature import peak_local_max from skimage.measure import label from skimage.morphology import remove_small_objects, h_maxima from skimage.morphology import disk, ball, square, cube, dilation from skimage.segmentation import relabel_sequential, watershed from deepcell_toolbox.utils import erode_edges, fill_holes def deep_watershed(outputs, radius=10, maxima_threshold=0.1, interior_threshold=0.01, maxima_smooth=0, interior_smooth=1, maxima_index=0, interior_index=-1, label_erosion=0, small_objects_threshold=0, fill_holes_threshold=0, pixel_expansion=None, maxima_algorithm='h_maxima', **kwargs): """Uses ``maximas`` and ``interiors`` to perform watershed segmentation. ``maximas`` are used as the watershed seeds for each object and ``interiors`` are used as the watershed mask. Args: outputs (list): List of [maximas, interiors] model outputs. Use `maxima_index` and `interior_index` if list is longer than 2, or if the outputs are in a different order. radius (int): Radius of disk used to search for maxima maxima_threshold (float): Threshold for the maxima prediction. interior_threshold (float): Threshold for the interior prediction. maxima_smooth (int): smoothing factor to apply to ``maximas``. Use ``0`` for no smoothing. interior_smooth (int): smoothing factor to apply to ``interiors``. Use ``0`` for no smoothing. maxima_index (int): The index of the maxima prediction in ``outputs``. interior_index (int): The index of the interior prediction in ``outputs``. label_erosion (int): Number of pixels to erode segmentation labels. small_objects_threshold (int): Removes objects smaller than this size. fill_holes_threshold (int): Maximum size for holes within segmented objects to be filled. pixel_expansion (int): Number of pixels to expand ``interiors``. maxima_algorithm (str): Algorithm used to locate peaks in ``maximas``. One of ``h_maxima`` (default) or ``peak_local_max``. ``peak_local_max`` is much faster but seems to underperform when given regious of ambiguous maxima. Returns: numpy.array: Integer label mask for instance segmentation. 
Raises: ValueError: ``outputs`` is not properly formatted. """ try: maximas = outputs[maxima_index] interiors = outputs[interior_index] except (TypeError, KeyError, IndexError): raise ValueError('`outputs` should be a list of at least two ' 'NumPy arryas of equal shape.') valid_algos = {'h_maxima', 'peak_local_max'} if maxima_algorithm not in valid_algos: raise ValueError('Invalid value for maxima_algorithm: {}. ' 'Must be one of {}'.format( maxima_algorithm, valid_algos)) total_pixels = maximas.shape[1] * maximas.shape[2] if maxima_algorithm == 'h_maxima' and total_pixels > 5000**2: warnings.warn('h_maxima peak finding algorithm was selected, ' 'but the provided image is larger than 5k x 5k pixels.' 'This will lead to slow prediction performance.') # Handle deprecated arguments min_distance = kwargs.pop('min_distance', None) if min_distance is not None: radius = min_distance warnings.warn('`min_distance` is now deprecated in favor of `radius`. ' 'The value passed for `radius` will be used.', DeprecationWarning) # distance_threshold vs interior_threshold distance_threshold = kwargs.pop('distance_threshold', None) if distance_threshold is not None: interior_threshold = distance_threshold warnings.warn('`distance_threshold` is now deprecated in favor of ' '`interior_threshold`. The value passed for ' '`distance_threshold` will be used.', DeprecationWarning) # detection_threshold vs maxima_threshold detection_threshold = kwargs.pop('detection_threshold', None) if detection_threshold is not None: maxima_threshold = detection_threshold warnings.warn('`detection_threshold` is now deprecated in favor of ' '`maxima_threshold`. The value passed for ' '`detection_threshold` will be used.', DeprecationWarning) if maximas.shape[:-1] != interiors.shape[:-1]: raise ValueError('All input arrays must have the same shape. ' 'Got {} and {}'.format( maximas.shape, interiors.shape)) if maximas.ndim not in {4, 5}: raise ValueError('maxima and interior tensors must be rank 4 or 5. ' 'Rank 4 is 2D data of shape (batch, x, y, c). 
' 'Rank 5 is 3D data of shape (batch, frames, x, y, c).') input_is_3d = maximas.ndim > 4 # fill_holes is not supported in 3D if fill_holes_threshold and input_is_3d: warnings.warn('`fill_holes` is not supported for 3D data.') fill_holes_threshold = 0 label_images = [] for maxima, interior in zip(maximas, interiors): # squeeze out the channel dimension if passed maxima = nd.gaussian_filter(maxima[..., 0], maxima_smooth) interior = nd.gaussian_filter(interior[..., 0], interior_smooth) if pixel_expansion: fn = cube if input_is_3d else square interior = dilation(interior, selem=fn(pixel_expansion * 2 + 1)) # peak_local_max is much faster but has poorer performance # when dealing with more ambiguous local maxima if maxima_algorithm == 'peak_local_max': coords = peak_local_max( maxima, min_distance=radius, threshold_abs=maxima_threshold, exclude_border=kwargs.get('exclude_border', False)) markers = np.zeros_like(maxima) slc = tuple(coords[:, i] for i in range(coords.shape[1])) markers[slc] = 1 else: # Find peaks and merge equal regions fn = ball if input_is_3d else disk markers = h_maxima(image=maxima, h=maxima_threshold, selem=fn(radius)) markers = label(markers) label_image = watershed(-1 * interior, markers, mask=interior > interior_threshold, watershed_line=0) if label_erosion: label_image = erode_edges(label_image, label_erosion) # Remove small objects if small_objects_threshold: label_image = remove_small_objects(label_image, min_size=small_objects_threshold) # fill in holes that lie completely within a segmentation label if fill_holes_threshold > 0: label_image = fill_holes(label_image, size=fill_holes_threshold) # Relabel the label image label_image, _, _ = relabel_sequential(label_image) label_images.append(label_image) label_images = np.stack(label_images, axis=0) label_images = np.expand_dims(label_images, axis=-1) return label_images def deep_watershed_mibi(model_output, interior_model='pixelwise-interior', maxima_model='inner-distance', **kwargs): """DEPRECATED. Please use ``deep_watershed`` instead. Postprocessing function for multiplexed deep watershed models. Thresholds the inner distance prediction to find cell centroids, which are used to seed a marker based watershed of the pixelwise interior prediction. Args: model_output (dict): DeepWatershed model output. A dictionary containing key: value pairs with the transform name and the corresponding output. Currently supported keys: - inner_distance: Prediction for the inner distance transform. - outer_distance: Prediction for the outer distance transform. - fgbg: Foreground prediction for the foregound/background transform. - pixelwise_interior: Interior prediction for the interior/border/background transform. interior_model (str): Name of semantic head used to predict interior of each object. maxima_model (str): Name of semantic head used to predict maxima of each object. kwargs (dict): Keyword arguments for ``deep_watershed``. Returns: numpy.array: Uniquely labeled mask. Raises: ValueError: if ``interior_model`` or ``maxima_model`` is invalid. ValueError: if ``interior_model`` or ``maxima_model`` predictions do not have length 4 """ text = ('deep_watershed_mibi is deprecated and will be removed in a ' 'future version. 
Please use ' '`deepcell_toolbox.deep_watershed.deep_watershed` instead.') warnings.warn(text, DeprecationWarning) interior_model = str(interior_model).lower() maxima_model = str(maxima_model).lower() valid_model_names = {'inner-distance', 'outer-distance', 'fgbg-fg', 'pixelwise-interior'} zipped = zip(['interior_model', 'maxima_model'], [interior_model, maxima_model]) for name, model in zipped: if model not in valid_model_names: raise ValueError('{} must be one of {}, got {}'.format( name, valid_model_names, model)) arr = model_output[model] if len(arr.shape) != 4: raise ValueError('Model output must be of length 4. The {} {} ' 'output provided is of shape {}.'.format( name, model, arr.shape)) output = [model_output[maxima_model], model_output[interior_model]] label_images = deep_watershed(output, **kwargs) return label_images def deep_watershed_3D(*args, **kwargs): """DEPRECATED. Please use ``deep_watershed`` instead.""" text = ('deep_watershed_3d is deprecated and will be removed in a future ' 'version. Please use ' '`deepcell_toolbox.deep_watershed.deep_watershed` instead.') warnings.warn(text, DeprecationWarning) return deep_watershed(*args, **kwargs)
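# A minimal usage sketch (my addition, not part of the original module): it
# assumes only the interface documented above -- `outputs` is a list of
# [maximas, interiors] predictions with shape (batch, x, y, channels).
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    maximas = rng.random((1, 64, 64, 1))    # stand-in for the maxima head output
    interiors = rng.random((1, 64, 64, 1))  # stand-in for the interior head output

    labels = deep_watershed([maximas, interiors],
                            radius=10,
                            maxima_threshold=0.1,
                            interior_threshold=0.01)
    print(labels.shape)  # (1, 64, 64, 1) integer label mask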
python
from stan import StanDict if __name__ == '__main__': dict_1 = StanDict() dict_2 = StanDict() dict_1['metric_1'] = 1 dict_1['metric_2'] = 2 dict_2['metric_3'] = 3 dict_2['metric_4'] = 4 print(dict_1) print(dict_2) print(dict_1 + dict_2) print(dict_1['missing_key'])
python
""" What if we wish to apply decorator for all the methods of a class?? It's possible with the help of class decorator. Limitation: Class decorator do not work for class methods and static methods Let's see how setattr works before we use class decorator Syntax : setattr(obj, var, val) Parameters : obj : Object whose which attribute is to be assigned. var : object attribute which has to be assigned. val : value with which variable is to be assigned. """ from functools import wraps def debug(func): msg = "Calling method: %s" % (func.__qualname__) @wraps(func) def wrapper_function(*args, **kwargs): # print(msg) return func(*args, **kwargs) return wrapper_function def debugmethods(cls): # vars(cls) -> {'add': <function TestClass.add at 0x0118EC40>,} # print(vars(cls)) for name, val in vars(cls).items(): if callable(val): setattr(cls, name, debug(val)) return cls @debugmethods class TestClass(object): def add(a, b): return a + b def sub(a, b): return a - b @classmethod def foo(cls): return 'foo' if __name__ == "__main__": print(TestClass.add(3, 4)) print(TestClass.sub(4, 3)) print(TestClass.foo()) # Class decorator is not working for cls methods
python
# -*- coding: utf-8 -*- from . import wizard_wxwork_contacts_sync from . import wizard_wxwork_sync_tag from . import wizard_wxwork_sync_user
python
import numpy as np
import pandas as pd


def calculate_ROIC(data):
    """Gets a data frame with the following fields: OperatingIncomeLoss, TaxRate,
    LongTermDebt, CurrentDebt, StockholdersEquity and Cash, and calculates the
    ROIC of the company per year.

    Arguments:
        data {pd.DataFrame} -- Dataframe with all needed columns
    """
    nopat = data['OperatingIncomeLoss'] * (1 - data['TaxRate'])
    long_term_debt = data['LongTermDebt'].fillna(0)
    current_debt = data['CurrentDebt'].fillna(0)
    invested_capital = long_term_debt + current_debt + data['StockholdersEquity'] - data['Cash']

    # the first year has no previous year to average with
    average_invested_capital = [np.nan]
    for i in range(len(invested_capital))[1:]:
        average = (invested_capital.iloc[i] + invested_capital.iloc[i - 1]) / 2
        average_invested_capital.append(average)

    roic_values = nopat.divide(average_invested_capital)
    roic = pd.Series([f"{round(100 * val, 2)}%" for val in roic_values],
                     index=roic_values.index)
    return roic


def calculate_cagr(start_value, end_value, years):
    if start_value <= 0 or end_value <= 0:
        return None
    cagr = ((end_value / start_value) ** (1 / years) - 1)
    return int(np.round(cagr * 100))


def calculate_cagr_of_time_series(input_series):
    if input_series.index[-1] == 'TTM':
        values = input_series.iloc[:-1]
    else:
        values = input_series

    current_year = values.index[-1]
    current_value = values.iloc[-1]
    periods = []
    cagrs = []
    for idx, value in enumerate(values.iloc[:-1]):
        periods.append(current_year - values.index[idx])
        try:
            cagr = calculate_cagr(value, current_value, periods[-1])
            cagrs.append(str(cagr) + "%")
        except:
            cagrs.append(None)
    cagrs.append(np.nan)

    columns = [str(period) + ' years' for period in periods] + ['now']
    out = pd.DataFrame(columns=columns, index=['value', 'CAGR'])
    out.loc['value'] = values.values
    out.loc['CAGR'] = cagrs
    return out


def calc_growth_at_normalized_PE(eps_ttm, normalized_pe_estimation, GR_estimation):
    '''
    A valuation technique where we predict a fair price for the stock by
    projecting the estimated growth onto future earnings, and then discounting
    them back to today (with a discount rate).
    '''
    # high estimate: 12% discount rate over 6 years
    future_eps = eps_ttm * np.power((1 + GR_estimation / 100.0), 6)
    discounted_eps = future_eps / np.power(1.12, 6)
    high_value = discounted_eps * normalized_pe_estimation

    # low estimate: 15% discount rate over 5 years
    future_eps = eps_ttm * np.power((1 + GR_estimation / 100.0), 5)
    discounted_eps = future_eps / np.power(1.15, 5)
    low_value = discounted_eps * normalized_pe_estimation

    return low_value, high_value


def calc_owner_earnings(last_year_data):
    '''
    A valuation technique where we calculate the owner earnings from the
    business operation. The assumption is that if the market cap is higher
    than 10 years of earnings, then the stock might be overpriced.

    The function gets the income statement data, and returns the owner earnings.
    '''
    balance = {}
    balance['income'] = last_year_data['NetIncomeLoss']
    balance['tax'] = last_year_data['IncomeTaxExpenseBenefit']
    balance['depreciation'] = last_year_data['DepreciationAndAmortization']
    balance['receivables'] = last_year_data['IncreaseDecreaseInAccountsReceivable']
    balance['payable'] = last_year_data['IncreaseDecreaseInAccountsPayable']
    balance['capex'] = last_year_data['CapitalExpenditure']

    # missing values default to 0, but income and capex are essential
    for key in balance.keys():
        if np.isnan(balance[key]):
            balance[key] = 0
            if key in ['income', 'capex']:
                print('Not enough information for owner earnings calculation')
                return None

    owner_earnings = balance['income'] + balance['tax'] + balance['depreciation'] - \
        balance['receivables'] + balance['payable'] - balance['capex']
    return owner_earnings


def DCF_FCF(latest_fcf, growth_rate=20):
    '''
    Discounted Cash Flow model based on Free Cash Flow (as described in
    https://www.gurufocus.com/).

    The future cash flow is estimated based on a cash flow growth rate and a
    discount rate. All of the discounted future cash flow is added together to
    get the current intrinsic value of the company. We use a two-stage model
    when calculating a stock's intrinsic value - a growth stage with high
    growth and a terminal stage with slower growth.

    Here I do the estimation twice with different growth rates to get
    low / high bounds.
    '''
    if latest_fcf <= 0:
        return None, None

    growth_rate /= 100  # change percents to fractions
    d = 0.12  # discount rate
    terminal_growth_rate = 0.04
    y1 = 10  # years at high growth rate
    y2 = 10  # years at the terminal stage

    accumulated_ratios = 0
    for y in range(y1 + 1)[1:]:
        g_2_d_ratio = np.power((1 + growth_rate) / (1 + d), y)
        accumulated_ratios += g_2_d_ratio
    for y in range(y2 + 1)[1:]:
        terminal_ratio = np.power((1 + terminal_growth_rate) / (1 + d), y)
        accumulated_ratios += g_2_d_ratio * terminal_ratio
    high_DCF = latest_fcf * accumulated_ratios

    # do a lower estimation with a slower growth rate
    low_growth_rate = max(0.05, growth_rate / 2)
    accumulated_ratios = 0
    for y in range(y1 + 1)[1:]:
        g_2_d_ratio = np.power((1 + low_growth_rate) / (1 + d), y)
        accumulated_ratios += g_2_d_ratio
    for y in range(y2 + 1)[1:]:
        terminal_ratio = np.power((1 + terminal_growth_rate) / (1 + d), y)
        accumulated_ratios += g_2_d_ratio * terminal_ratio
    low_DCF = latest_fcf * accumulated_ratios

    return low_DCF, high_DCF
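# A small usage sketch (my addition): the figures below are made up and only
# illustrate the call signatures defined above.
if __name__ == '__main__':
    # $100 growing to $200 over 5 years is roughly a 15% CAGR
    print(calculate_cagr(100, 200, 5))  # -> 15

    # fair-value bounds for a stock earning $5/share (TTM), assuming a
    # normalized P/E of 20 and a 10% estimated growth rate
    low, high = calc_growth_at_normalized_PE(5.0, 20, 10)
    print(round(low, 2), round(high, 2))

    # two-stage DCF bounds from a latest free cash flow of $1B at 20% growth
    low_dcf, high_dcf = DCF_FCF(1e9, growth_rate=20)
    print(round(low_dcf), round(high_dcf))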
python
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# -- Libraries ----------------------------------------------------------------
import pprint
import time
from urllib.error import URLError

import pywapi

# -- Configurations ------------------------------------------------------------
# the city name you want to search
CITY_NAME = 'York, YOR, United Kingdom'
# this defines the update time (s) of the weather data
UPDATE_TIME = 30

# -- Main Program -------------------------------------------------------------
# (no need to change contents below this line)

# find the city_id from weather.com
city_list = pywapi.get_loc_id_from_weather_com(CITY_NAME)
cnt = city_list['count']
print('Warning: we found ' + str(cnt) + ' possible cities, '
      'We will use the first one!')
for i in range(cnt):
    print(city_list[i])
city_id = city_list[0][0]

# main loop
while True:
    try:
        # retrieve weather data from weather.com
        weather_info = pywapi.get_weather_from_weather_com(city_id)

        # print raw data (debug only)
        print('\nRaw Retrieved Data:')
        pp = pprint.PrettyPrinter(indent=4)
        pp.pprint(weather_info)

        # get units
        units = {}
        units['distance'] = weather_info['units']['distance']
        units['pressure'] = weather_info['units']['pressure']
        units['rainfall'] = weather_info['units']['rainfall']
        units['speed'] = weather_info['units']['speed']
        units['temperature'] = weather_info['units']['temperature']

        # output current condition
        print('[City]')
        print(weather_info['location']['name'])
        print('\n[Current]')
        print('Description: ' + weather_info['current_conditions']['text'])
        print('Temperature: ' + weather_info['current_conditions']['temperature'] +
              ' ' + units['temperature'])
        print('Humidity: ' + weather_info['current_conditions']['humidity'] + '%')
        print(weather_info['current_conditions']['wind'])

        # forecasts
        print('\n[Forecast]')
        for i in weather_info['forecasts']:
            print(i['date'] + ',' + i['day_of_week'] + ': '
                  + i['day']['brief_text'] + ', '
                  + i['low'] + '-' + i['high'] + ' ' + units['temperature'])

        # package data into a string
        weather_str = weather_info['location']['name'] + ';' \
            + weather_info['current_conditions']['text'] + ';' \
            + weather_info['current_conditions']['temperature'] + ';' \
            + weather_info['current_conditions']['humidity'] + ';' \
            + weather_info['forecasts'][0]['high'] + ';' \
            + weather_info['forecasts'][0]['low'] + ';\n'
    except URLError:
        pass
    except:
        pass

    # sleep
    time.sleep(UPDATE_TIME)
python
#!/usr/bin/env python3 import sys from argparse import ArgumentParser from collections import defaultdict def parse_args(): p = ArgumentParser('Constructs vocabulary file.') p.add_argument( '--input', type=str, metavar='FILE', required=True, help='source corpus') p.add_argument( '--output', type=str, metavar='FILE', required=True, help='vocabulary file') p.add_argument( '--size', type=int, metavar='N', required=True, help='vocabulary size') args = p.parse_args() assert args.size > 3 return args def main(): args = parse_args() freq = defaultdict(int) num_lines = 0 with open(args.input) as fp: for line in fp: num_lines += 1 for word in line.split(): freq[word] += 1 freq_sorted = sorted(freq.items(), key=lambda x: x[1], reverse=True) num_unk = sum(x[1] for x in freq_sorted[args.size - 3:]) with open(args.output, 'w') as fp: print('<unk>', file=fp) print('<s>', file=fp) print('</s>', file=fp) for i, (key, val) in zip(range(3, args.size), freq_sorted): print('%s' % key, file=fp) if __name__ == '__main__': main()
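# Example invocation (my addition; the file names and script name are hypothetical):
#
#   ./build_vocab.py --input train.tok.en --output vocab.en --size 8000
#
# The first three lines of the output file are reserved for <unk>, <s> and
# </s>, followed by the (size - 3) most frequent words in the corpus.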
python
class PointMath:
    """Math with points and lines"""

    # Taken from:
    # https://stackoverflow.com/questions/1811549/perpendicular-on-a-line-from-a-given-point/1811636#1811636
    # Accessed November 21, 2017
    @staticmethod
    def perpendicularIntersection(point, linePoint1, linePoint2):
        """
        Return the point of intersection of the line that is perpendicular
        to the given line (defined by "linePoint1" and "linePoint2") and
        goes through "point"
        """
        x1 = linePoint1[0]
        y1 = linePoint1[1]
        x2 = linePoint2[0]
        y2 = linePoint2[1]
        x3 = point[0]
        y3 = point[1]
        k = ((y2 - y1) * (x3 - x1) - (x2 - x1) * (y3 - y1)) / ((y2 - y1)**2 + (x2 - x1)**2)
        x4 = x3 - k * (y2 - y1)
        y4 = y3 + k * (x2 - x1)
        return (x4, y4)

    @staticmethod
    def pointInSegment(point, segmentPoint1, segmentPoint2):
        """
        Return whether the given point is on the given line segment
        (assuming it is on the line that extends from the line segment)
        """
        x = point[0]
        y = point[1]
        if x < segmentPoint1[0] and x < segmentPoint2[0]:
            return False
        if x > segmentPoint1[0] and x > segmentPoint2[0]:
            return False
        if y < segmentPoint1[1] and y < segmentPoint2[1]:
            return False
        if y > segmentPoint1[1] and y > segmentPoint2[1]:
            return False
        return True
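# A quick usage sketch (my addition): drop a perpendicular from (0, 1) onto
# the segment from (0, 0) to (2, 0); the foot of the perpendicular is (0, 0),
# which lies inside the segment.
if __name__ == '__main__':
    foot = PointMath.perpendicularIntersection((0, 1), (0, 0), (2, 0))
    print(foot)                                            # (0.0, 0.0)
    print(PointMath.pointInSegment(foot, (0, 0), (2, 0)))  # True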
python