commit (stringlengths, 40-40) | subject (stringlengths, 1-1.49k) | old_file (stringlengths, 4-311) | new_file (stringlengths, 4-311) | new_contents (stringlengths, 1-29.8k) | old_contents (stringlengths, 0-9.9k) | lang (stringclasses, 3 values) | proba (float64, 0-1)
---|---|---|---|---|---|---|---
d3409629c120e366c9c7500bc111f61b13e74dc8 | Change port. | bot.py | bot.py | import os
import json
import requests
from flask import Flask
from flask_restful import Resource, Api, reqparse
from slackclient import SlackClient
app = Flask(__name__)
api = Api(app)
token = os.environ.get('SLACK_KEY')
sc = SlackClient(token)
print sc.api_call('api.test')
class RealName(Resource):
def user_ids(self):
r = requests.get(
'https://slack.com/api/groups.list?token={}'.format(token))
content = r.json()
return content.get('groups')[0].get('members')
def get_username(self, ids):
r = requests.get(
'https://slack.com/api/users.list?token={}'.format(token))
content = r.json().get('members')
names = []
for id in ids:
for user in content:
if id == user.get('id') and not user.get('deleted') and not user.get('is_bot'): # noqa
names.append(
{
'id': id,
'name': user.get('real_name'),
'images': user.get('profile').get('image_48')
}
)
return names
def get(self):
# return real_name from user id info from slack
ids = self.user_ids()
output = self.get_username(ids)
return output
api.add_resource(RealName, '/names')
class PostDM(Resource):
def post(self):
# expect user_id and message data from the client
parser = reqparse.RequestParser()
parser.add_argument('user_id')
parser.add_argument('visitor_name')
# assign data from request to variables
args = parser.parse_args()
user_id = args.get('user_id')
visitor_name = args.get('visitor_name')
if visitor_name:
message = 'You have a visitor called {} at the gate.'.format(
visitor_name)
else:
message = 'Hi! You have a visitor waiting for you.'
# returns a string - to be converted to dict later. Then retrieve
# channel ID
string_resp = sc.api_call('im.open', user=user_id)
dict_resp = json.loads(string_resp)
channelID = dict_resp.get('channel').get('id')
sc.api_call(
'chat.postMessage',
as_user='true:',
channel=channelID,
text=message
)
return {'message': 'Notification sent'}, 200
api.add_resource(PostDM, '/send')
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
| import os
import json
import requests
from flask import Flask
from flask_restful import Resource, Api, reqparse
from slackclient import SlackClient
app = Flask(__name__)
api = Api(app)
token = os.environ.get('SLACK_KEY')
sc = SlackClient(token)
print sc.api_call('api.test')
class RealName(Resource):
def user_ids(self):
r = requests.get(
'https://slack.com/api/groups.list?token={}'.format(token))
content = r.json()
return content.get('groups')[0].get('members')
def get_username(self, ids):
r = requests.get(
'https://slack.com/api/users.list?token={}'.format(token))
content = r.json().get('members')
names = []
for id in ids:
for user in content:
if id == user.get('id') and not user.get('deleted') and not user.get('is_bot'): # noqa
names.append(
{
'id': id,
'name': user.get('real_name'),
'images': user.get('profile').get('image_48')
}
)
return names
def get(self):
# return real_name from user id info from slack
ids = self.user_ids()
output = self.get_username(ids)
return output
api.add_resource(RealName, '/names')
class PostDM(Resource):
def post(self):
# expect user_id and message data from the client
parser = reqparse.RequestParser()
parser.add_argument('user_id')
parser.add_argument('visitor_name')
# assign data from request to variables
args = parser.parse_args()
user_id = args.get('user_id')
visitor_name = args.get('visitor_name')
if visitor_name:
message = 'You have a visitor called {} at the gate.'.format(
visitor_name)
else:
message = 'Hi! You have a visitor waiting for you.'
# returns a string - to be converted to dict later. Then retrieve
# channel ID
string_resp = sc.api_call('im.open', user=user_id)
dict_resp = json.loads(string_resp)
channelID = dict_resp.get('channel').get('id')
sc.api_call(
'chat.postMessage',
as_user='true:',
channel=channelID,
text=message
)
return {'message': 'Notification sent'}, 200
api.add_resource(PostDM, '/send')
if __name__ == '__main__':
app.run(debug=True)
| Python | 0 |
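The change in this row replaces Flask's bare `app.run(debug=True)` with an environment-driven port binding, the convention PaaS hosts such as Heroku use to tell an app where to listen. A minimal standalone sketch of the same pattern (the route is illustrative, not from the commit):

```python
import os
from flask import Flask

app = Flask(__name__)

@app.route('/')
def index():
    return 'ok'

if __name__ == '__main__':
    # The hosting platform injects PORT; fall back to 5000 locally.
    port = int(os.environ.get('PORT', 5000))
    # Bind to all interfaces so the platform's router can reach the app.
    app.run(host='0.0.0.0', port=port)
```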
9ab748de8ca86b2f62bda30c5f2f3f0b2bde7047 | Handle TimeoutException and improve code structure | bot.py | bot.py | import os
import re
import time
import json
import schedule
import config
from slackclient import SlackClient
from handlers import HandlerManager
from storage import Storage
BOT_ID = ''
sc = SlackClient(os.environ['SLACK_BOT_TOKEN'])
storage = Storage()
def post(channel, text, as_user=None):
if as_user is None:
as_user = True
sc.api_call("chat.postMessage", channel=channel, as_user=as_user, text=text)
def post_report(user, title, attachments):
sc.api_call("chat.postMessage",
channel=config.DAILY_MEETING_CHANNEL,
as_user=False,
username=user['name'],
icon_url=user['profile']['image_48'],
text=title,
attachments=json.dumps(attachments))
handler = HandlerManager(post, post_report)
# http://stackoverflow.com/a/42013042/3109776
def is_direct_message(output, own_id):
return output and \
'text' in output and \
'channel' in output and \
'type' in output and \
'user' in output and \
output['user'] != own_id and \
output['type'] == 'message' and \
output['channel'].startswith('D')
def fetch_messages():
try:
messages = sc.rtm_read()
if messages and len(messages) > 0:
for m in messages:
handle_message(m)
except TimeoutError:
pass
def handle_message(m):
if not is_direct_message(m, BOT_ID):
return
text, user_id, channel = m['text'], m['channel'], m['user']
if text and channel and user_id:
user = get_user(user_id)
handler.handle(channel, user, text)
storage.save_user(user)
"""Get the user cached in local storage or fetch from API (It'll be cached later)"""
def get_user(user_id):
user = storage.get_user(user_id, None)
# TODO: update this user from API once in while
if user:
return user
return sc.api_call("users.info", user=user_id)['user']
def resolve_bot_id():
res = sc.api_call("users.list")
if res.get('ok'):
users = res.get('members')
for user in users:
if 'name' in user and user.get('name') == config.BOT_NAME:
return user.get('id')
raise Exception("Failed to find bot named '{}'!".format(config.BOT_NAME))
def run_daily_meeting():
users = storage.get_users_for_daily_meeting()
print("Run daily meeting:")
for user in users:
print(user['name'])
channel = "@{}".format(user['name'])
first_name = re.split(" +", user['real_name'])[0].strip()
post(channel,
"Hi {}! Time for the standup metting. Please answer the following questions:"
.format(first_name))
handler.handle(channel, user, 'report')
storage.save_user(user)
if __name__ == "__main__":
if not sc.rtm_connect():
raise Exception("Connection failed! Please check your Slack Token")
BOT_ID = resolve_bot_id()
print("Bot {} connected and running!".format(BOT_ID))
schedule \
.every().day \
.at(config.TIME) \
.do(run_daily_meeting)
while True:
fetch_messages()
schedule.run_pending()
time.sleep(1)
| import os
import re
import time
import json
import schedule
import config
from slackclient import SlackClient
from handlers import HandlerManager
from storage import Storage
BOT_ID = ''
sc = SlackClient(os.environ['SLACK_BOT_TOKEN'])
storage = Storage()
def post(channel, text, as_user=None):
if as_user is None:
as_user = True
sc.api_call("chat.postMessage", channel=channel, as_user=as_user, text=text)
def post_report(user, title, attachments):
sc.api_call("chat.postMessage",
channel=config.DAILY_MEETING_CHANNEL,
as_user=False,
username=user['name'],
icon_url=user['profile']['image_48'],
text=title,
attachments=json.dumps(attachments))
handler = HandlerManager(post, post_report)
# http://stackoverflow.com/a/42013042/3109776
def is_direct_message(output, own_id):
return output and \
'text' in output and \
'channel' in output and \
'type' in output and \
'user' in output and \
output['user'] != own_id and \
output['type'] == 'message' and \
output['channel'].startswith('D')
def parse_output(output_list):
if output_list and len(output_list) > 0:
for output in output_list:
if is_direct_message(output, BOT_ID):
return output['text'], output['channel'], output['user']
return None, None, None
def resolve_bot_id():
res = sc.api_call("users.list")
if res.get('ok'):
users = res.get('members')
for user in users:
if 'name' in user and user.get('name') == config.BOT_NAME:
return user.get('id')
raise Exception("Failed to find bot named '{}'!".format(config.BOT_NAME))
def run_daily_meeting():
users = storage.get_users_for_daily_meeting()
print("Run daily meeting:")
for user in users:
print(user['name'])
channel = "@{}".format(user['name'])
first_name = re.split(" +", user['real_name'])[0].strip()
post(channel,
"Hi {}! Time for the standup metting. Please answer the following questions:"
.format(first_name))
handler.handle(channel, user, 'report')
storage.save_user(user)
if __name__ == "__main__":
if not sc.rtm_connect():
raise Exception("Connection failed! Please check your Slack Token")
BOT_ID = resolve_bot_id()
print("Bot {} connected and running!".format(BOT_ID))
schedule \
.every().day \
.at(config.TIME) \
.do(run_daily_meeting)
while True:
msg, channel, user_id = parse_output(sc.rtm_read())
if msg and channel and user_id:
user = sc.api_call("users.info", user=user_id)['user']
user = storage.get_user(user['id'], user)
handler.handle(channel, user, msg)
storage.save_user(user)
schedule.run_pending()
time.sleep(1)
| Python | 0 |
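This refactor splits the old `parse_output` loop into `fetch_messages`/`handle_message` and swallows the `TimeoutError` that `rtm_read()` can raise when the websocket read times out, so one quiet tick no longer kills the bot. A minimal sketch of that read loop, assuming `sc` is a connected `SlackClient` and `handle_message` is the per-event callback:

```python
def fetch_messages(sc, handle_message):
    """Drain pending RTM events, treating a read timeout as 'no data'."""
    try:
        messages = sc.rtm_read()
    except TimeoutError:
        # A timed-out read just means nothing arrived this tick.
        return
    for m in messages or []:
        handle_message(m)
```

The commit also routes user lookups through `storage.get_user` first, so the `users.info` API call only happens on a cache miss.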
b367ff9e032d01f15aaaedc7e93446e9dda2649a | Fix outputing | bot.py | bot.py | import evaluation
import sys
settings = {}
current_grid = [[0]]
current_round = 0
me = -1
op = -1
def play(grid, column, color):
grid = [x[:] for x in grid]
for row in reversed(grid):
if row[column] == 0:
row[column] = color
return grid
# Can't play there
return None
def nodes(grid, player):
for i in range(settings['field_columns']):
new_grid = play(grid, i, player)
if new_grid:
yield i, new_grid
def minimax(grid, depth, is_max_player):
depth -= 1
if is_max_player:
best = evaluation.LOSE
for i, new_grid in nodes(grid, me):
current_value = evaluation.scan(new_grid, me ,op)
if current_value == evaluation.WIN or depth == 0:
return current_value
v = minimax(new_grid, depth, False)
best = max(best, v)
if best == evaluation.WIN:
break
return best
else:
best = evaluation.WIN
for i, new_grid in nodes(grid, op):
current_value = evaluation.scan(new_grid, me ,op)
if current_value == evaluation.LOSE or depth == 0:
return current_value
v = minimax(new_grid, depth, True)
best = min(best, v)
if best == evaluation.LOSE:
break
return best
first = True
if __name__ == '__main__':
while True:
line = raw_input()
if not line:
continue
content = line.split()
if content[0] == 'settings':
try:
settings[content[1]] = int(content[2])
except:
settings[content[1]] = content[2]
if content[1] == 'your_botid':
me = int(content[2])
# assuming the ids are always 1 and 2?
op = [2,1][me - 1]
elif content[0] == 'update':
if content[2] == 'field':
current_grid = [[int(x) for x in y.split(',')] for y in content[3].split(';')]
elif content[2] == 'round':
current_round = int(content[3])
elif content[0] == 'action':
if first:
first = False
sys.stdout.write(('place_disc %d' % (settings['field_columns'] // 2)) + '\n')
sys.stdout.flush()
continue
values = sorted((minimax(g, 3, False), i) for i, g in nodes(current_grid, me))
sys.stdout.write(('place_disc %d' % values[-1][1]) + '\n')
sys.stdout.flush()
# TODO get the remaining time?
# TODO get the per-turn time?
| import evaluation
settings = {}
current_grid = [[0]]
current_round = 0
me = -1
op = -1
def play(grid, column, color):
grid = [x[:] for x in grid]
for row in reversed(grid):
if row[column] == 0:
row[column] = color
return grid
# Can't play there
return None
def nodes(grid, player):
for i in range(settings['field_columns']):
new_grid = play(grid, i, player)
if new_grid:
yield i, new_grid
def minimax(grid, depth, is_max_player):
depth -= 1
if is_max_player:
best = evaluation.LOSE
for i, new_grid in nodes(grid, me):
current_value = evaluation.scan(new_grid, me ,op)
if current_value == evaluation.WIN or depth == 0:
return current_value
v = minimax(new_grid, depth, False)
best = max(best, v)
if best == evaluation.WIN:
break
return best
else:
best = evaluation.WIN
for i, new_grid in nodes(grid, op):
current_value = evaluation.scan(new_grid, me ,op)
if current_value == evaluation.LOSE or depth == 0:
return current_value
v = minimax(new_grid, depth, True)
best = min(best, v)
if best == evaluation.LOSE:
break
return best
if __name__ == '__main__':
while True:
line = raw_input()
if not line:
continue
content = line.split()
if content[0] == 'settings':
try:
settings[content[1]] = int(content[2])
except:
settings[content[1]] = content[2]
if content[1] == 'your_botid':
me = int(content[2])
# assuming the ids are always 1 and 2?
op = [2,1][me - 1]
elif content[0] == 'update':
if content[2] == 'field':
current_grid = [[int(x) for x in y.split(',')] for y in content[3].split(';')]
elif content[2] == 'round':
current_round = int(content[3])
elif content[0] == 'action':
if current_round == 1:
print('place_disk %d' % (settings['field_columns'] // 2))
continue
values = sorted((minimax(g, 2, False), i) for i, g in nodes(current_grid, me))
print('place_disk %d' % values[-1][1])
# TODO get the remaining time?
# TODO get the per-turn time?
| Python | 0.999986 |
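Two things change in the action branch here: the command name goes from `place_disk` to `place_disc`, and bare `print` calls become explicit `sys.stdout.write` plus `flush()`. The flush matters because stdout is block-buffered when it is a pipe rather than a TTY, so a game engine reading the bot's output may otherwise never see the move. A sketch of the pattern:

```python
import sys

def send_move(column):
    # stdout to a pipe is block-buffered, so an engine on the other end
    # may wait forever for an unflushed line; write and flush explicitly.
    sys.stdout.write('place_disc %d\n' % column)
    sys.stdout.flush()

send_move(3)
```

The first-move shortcut also switches from checking `current_round == 1` to a local `first` flag, and the minimax depth rises from 2 to 3.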
fae0989a5dc6886b11896f6ba5c6484cd1c1f735 | Fix error on unknown command and blank game name | bot.py | bot.py | import asyncio
import discord
import text_adventure
class Bot(object):
def __init__(self, client, config):
self.client = client
self.config = config
self.game_obj = None
@asyncio.coroutine
def do_command(self, message, command, *args):
try:
yield from getattr(self, command)(message, *args)
except AttributeError:
pass
@asyncio.coroutine
def game(self, message, command, *args):
yield from getattr(self, 'game_' + command)(message, *args)
@asyncio.coroutine
def game_start(self, message, name):
if self.game_obj is not None:
return
self.game_obj = text_adventure.Game(self.config, name)
yield from self.client.change_presence(game = discord.Game(name = name))
yield from self.client.send_message(message.channel, self.game_obj.output())
@asyncio.coroutine
def game_input(self, message, inp):
if self.game_obj is None:
return
self.game_obj.inp(inp)
yield from self.client.send_message(message.channel, self.game_obj.output())
@asyncio.coroutine
def game_end(self, message):
if self.game_obj is None:
return
self.game_obj.stop()
self.game_obj = None
yield from self.client.change_presence(game = None)
@asyncio.coroutine
def parse_chatter(self, message):
if message.content.lower() == 'so' or ':so:' in message.content.lower():
yield from self.client.send_message(message.channel, 'so')
elif message.content.startswith(self.config['game_prefix']) and self.game_obj is not None:
yield from self.game_input(message, message.content[1:])
| import asyncio
import discord
import text_adventure
class Bot(object):
def __init__(self, client, config):
self.client = client
self.config = config
self.game_obj = None
@asyncio.coroutine
def do_command(self, message, command, *args):
yield from getattr(self, command)(message, *args)
@asyncio.coroutine
def game(self, message, command, *args):
yield from getattr(self, 'game_' + command)(message, *args)
@asyncio.coroutine
def game_start(self, message, name):
if self.game_obj is not None:
return
self.game_obj = text_adventure.Game(self.config, name)
yield from self.client.change_presence(game = discord.Game(name = name))
yield from self.client.send_message(message.channel, self.game_obj.output())
@asyncio.coroutine
def game_input(self, message, inp):
if self.game_obj is None:
return
self.game_obj.inp(inp)
yield from self.client.send_message(message.channel, self.game_obj.output())
@asyncio.coroutine
def game_end(self, message):
if self.game_obj is None:
return
self.game_obj.stop()
self.game_obj = None
yield from self.client.change_presence(game = discord.Game(name = ''))
@asyncio.coroutine
def parse_chatter(self, message):
if message.content.lower() == 'so' or ':so:' in message.content.lower():
yield from self.client.send_message(message.channel, 'so')
elif message.content.startswith(self.config['game_prefix']) and self.game_obj is not None:
yield from self.game_input(message, message.content[1:])
| Python | 0.000011 |
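The fix wraps command dispatch in a `try/except AttributeError`, so a message naming a nonexistent command is ignored instead of crashing the handler, and `game_end` clears presence with `game=None` rather than constructing a `discord.Game` with a blank name. A small self-contained sketch of the dispatch-with-fallback idea (the class and method names are illustrative):

```python
class Dispatcher(object):
    def hello(self):
        print('hello')

    def dispatch(self, command):
        # Resolve the command to a method first; unknown names are
        # silently ignored rather than raising AttributeError upward.
        try:
            handler = getattr(self, command)
        except AttributeError:
            return
        handler()

d = Dispatcher()
d.dispatch('hello')    # runs
d.dispatch('missing')  # ignored
```

Note that the commit's version puts the whole `yield from getattr(...)` call inside the `try`, which also swallows `AttributeError`s raised from within a valid handler; resolving the attribute before calling it, as above, keeps the guard narrow.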
0958e4760264fcf232e655c47d88a03bf38896b0 | Renamed subreddit command to reddit | bot.py | bot.py | import praw
import discord
from discord.ext import commands
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
reddit = praw.Reddit(client_id = os.environ.get("REDDIT_CLIENT_ID"),
client_secret = os.environ.get("REDDIT_CLIENT_SECRET"),
user_agent = "aySH Bot")
print(reddit.read_only)
async def top_subreddit(subreddit, time):
tops = reddit.subreddit(subreddit).top(time, limit = 1)
for top in tops:
await client.say(top.url)
# Bot config
prefix = '!'
des = 'aySH'
client = commands.Bot(description=des, command_prefix=prefix)
# Make a startup command
@client.event
async def on_ready():
print("[*]I'm in")
print('[*] Name: {}'.format(client.user.name))
# subreddit
@client.command(pass_context=True)
async def reddit(ctx, subreddit = "all", time = "day"):
# await print(top)
await top_subreddit(subreddit, time)
# random
@client.command(pass_context=True)
async def random(ctx):
# await print(top)
await top_subreddit('random', 'all')
# probuild
@client.command(pass_context=True)
async def build(ctx, champion = "janna"):
await client.say('http://www.probuilds.net/champions/details/' + champion)
# counter
@client.command(pass_context=True)
async def counter(ctx, champion = "janna"):
await client.say('http://lolcounter.com/champions/' + champion)
client.run(os.environ.get("DISCORD_CLIENT_TOKEN"))
| import praw
import discord
from discord.ext import commands
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
reddit = praw.Reddit(client_id = os.environ.get("REDDIT_CLIENT_ID"),
client_secret = os.environ.get("REDDIT_CLIENT_SECRET"),
user_agent = "aySH Bot")
print(reddit.read_only)
async def top_subreddit(subreddit, time):
tops = reddit.subreddit(subreddit).top(time, limit = 1)
for top in tops:
await client.say(top.url)
# Bot config
prefix = '!'
des = 'aySH'
client = commands.Bot(description=des, command_prefix=prefix)
# Make a startup command
@client.event
async def on_ready():
print("[*]I'm in")
print('[*] Name: {}'.format(client.user.name))
# subreddit
@client.command(pass_context=True)
async def subreddit(ctx, subreddit = "all", time = "day"):
# await print(top)
await top_subreddit(subreddit, time)
# random
@client.command(pass_context=True)
async def random(ctx):
# await print(top)
await top_subreddit('random', 'all')
# probuild
@client.command(pass_context=True)
async def build(ctx, champion = "janna"):
await client.say('http://www.probuilds.net/champions/details/' + champion)
# counter
@client.command(pass_context=True)
async def counter(ctx, champion = "janna"):
await client.say('http://lolcounter.com/champions/' + champion)
client.run(os.environ.get("DISCORD_CLIENT_TOKEN")) | Python | 0.999985 |
af7af25ed5a13a4ce45f358ec5548c2f9e6a492e | remove wiki from DNL | bot.py | bot.py | import json
import traceback
from datetime import datetime
from pathlib import Path
import aiohttp
import aredis
import asyncpg
from discord.ext import commands
from utils.custom_context import CustomContext
class QTBot(commands.Bot):
def __init__(self, config_file, *args, **kwargs):
self.config_file = config_file
self.description = "qtbot is a big qt written in python3 and love."
self.do_not_load = ("league", "poll", "music", "timer", "ris", "timer")
with open(self.config_file) as f:
self.api_keys = json.load(f)
self.token = self.api_keys["discord"]
super().__init__(
command_prefix=self.get_prefix,
description=self.description,
help_command=commands.DefaultHelpCommand(dm_help=True),
case_insensitive=True,
*args,
**kwargs,
)
self.aio_session = aiohttp.ClientSession(loop=self.loop)
# self.rune_client = lolrune.AioRuneClient()
self.redis_client = aredis.StrictRedis(host="localhost", decode_responses=True)
self.startup_extensions = [x.stem for x in Path("cogs").glob("*.py")]
self.loop.run_until_complete(self.create_db_pool())
self.loop.run_until_complete(self.load_all_prefixes())
def run(self):
super().run(self.token)
async def load_all_prefixes(self):
pres = await self.pg_con.fetch("SELECT * from custom_prefix")
# Load custom prefixes into a dict
self.pre_dict = {r["guild_id"]: r["prefix"] for r in pres}
async def get_prefix(self, message):
try:
return ("qt.", self.pre_dict[message.guild.id])
except (KeyError, AttributeError):
return "qt."
async def create_db_pool(self):
with open(self.config_file) as f:
self.pg_pw = json.load(f)["postgres"]
self.pg_con = await asyncpg.create_pool(
user="james", password=self.pg_pw, database="discord_testing"
)
async def on_message(self, message):
ctx = await self.get_context(message, cls=CustomContext)
await self.invoke(ctx)
async def on_ready(self):
if not hasattr(self, "start_time"):
self.start_time = datetime.now()
self.start_time_str = self.start_time.strftime("%B %d %H:%M:%S")
for extension in self.startup_extensions:
if extension not in self.do_not_load:
try:
self.load_extension(f"cogs.{extension}")
except:
print(f"Failed Extension: {extension}")
traceback.print_exc()
else:
print(f"Loaded Extension: {extension}")
print(f"Client logged in at {self.start_time_str}")
print(self.user.name)
print(self.user.id)
print("----------")
| import json
import traceback
from datetime import datetime
from pathlib import Path
import aiohttp
import aredis
import asyncpg
from discord.ext import commands
from utils.custom_context import CustomContext
class QTBot(commands.Bot):
def __init__(self, config_file, *args, **kwargs):
self.config_file = config_file
self.description = "qtbot is a big qt written in python3 and love."
self.do_not_load = ("league", "poll", "music", "timer", "ris", "timer", "wiki")
with open(self.config_file) as f:
self.api_keys = json.load(f)
self.token = self.api_keys["discord"]
super().__init__(
command_prefix=self.get_prefix,
description=self.description,
help_command=commands.DefaultHelpCommand(dm_help=True),
case_insensitive=True,
*args,
**kwargs,
)
self.aio_session = aiohttp.ClientSession(loop=self.loop)
# self.rune_client = lolrune.AioRuneClient()
self.redis_client = aredis.StrictRedis(host="localhost", decode_responses=True)
self.startup_extensions = [x.stem for x in Path("cogs").glob("*.py")]
self.loop.run_until_complete(self.create_db_pool())
self.loop.run_until_complete(self.load_all_prefixes())
def run(self):
super().run(self.token)
async def load_all_prefixes(self):
pres = await self.pg_con.fetch("SELECT * from custom_prefix")
# Load custom prefixes into a dict
self.pre_dict = {r["guild_id"]: r["prefix"] for r in pres}
async def get_prefix(self, message):
try:
return ("qt.", self.pre_dict[message.guild.id])
except (KeyError, AttributeError):
return "qt."
async def create_db_pool(self):
with open(self.config_file) as f:
self.pg_pw = json.load(f)["postgres"]
self.pg_con = await asyncpg.create_pool(
user="james", password=self.pg_pw, database="discord_testing"
)
async def on_message(self, message):
ctx = await self.get_context(message, cls=CustomContext)
await self.invoke(ctx)
async def on_ready(self):
if not hasattr(self, "start_time"):
self.start_time = datetime.now()
self.start_time_str = self.start_time.strftime("%B %d %H:%M:%S")
for extension in self.startup_extensions:
if extension not in self.do_not_load:
try:
self.load_extension(f"cogs.{extension}")
except:
print(f"Failed Extension: {extension}")
traceback.print_exc()
else:
print(f"Loaded Extension: {extension}")
print(f"Client logged in at {self.start_time_str}")
print(self.user.name)
print(self.user.id)
print("----------")
| Python | 0 |
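DNL here is the bot's `do_not_load` tuple; the commit simply adds `wiki` so that cog is skipped at startup. A small sketch of the same discover-then-filter pattern, using a set (which would also collapse the duplicate `timer` entry the tuple carries):

```python
from pathlib import Path

DO_NOT_LOAD = {'league', 'poll', 'music', 'timer', 'ris', 'wiki'}

# Discover cog modules on disk, then drop anything on the skip list.
extensions = [p.stem for p in Path('cogs').glob('*.py')
              if p.stem not in DO_NOT_LOAD]
```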
b8fb30a06ff15000a2d7542e7089b6c8ac1074e5 | Add --allow-drilled flag to cli.py, and increase recursion limit | cli.py | cli.py | # Copyright (c) 2015 Matthew Earl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Command line interface for solving a placement problem.
"""
__all__ = (
'main',
)
import argparse
import sys
import placer
import svg
def main(board, components, nets, args=None):
sys.setrecursionlimit(100000)
parser = argparse.ArgumentParser( description='Find circuit placements.')
parser.add_argument('--first-only', action='store_true',
help="Only output the first solution")
parser.add_argument('--allow-drilled', action='store_true',
help="Allow holes to be drilled out")
parser.add_argument('--svg', nargs='?', const=True,
help="Output SVG for the solutions")
parsed_args = parser.parse_args(args if args is not None else sys.argv[1:])
placement_iter = placer.place(board, components, nets,
allow_drilled=parsed_args.allow_drilled)
if parsed_args.first_only:
placement_iter = [next(placement_iter)]
if not parsed_args.svg:
count = 0
for placement in placement_iter:
placement.print_solution()
print()
count += 1
print("{} solutions".format(count))
else:
if isinstance(parsed_args.svg, str):
with open(parsed_args.svg, "w") as f:
svg.print_svg(placement_iter, file=f)
else:
svg.print_svg(placement_iter, file=sys.stdout)
| # Copyright (c) 2015 Matthew Earl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Command line interface for solving a placement problem.
"""
__all__ = (
'main',
)
import argparse
import sys
import placer
import svg
def main(board, components, nets, args=None):
parser = argparse.ArgumentParser( description='Find circuit placements.')
parser.add_argument('--first-only', action='store_true',
help="Only output the first solution")
parser.add_argument('--svg', nargs='?', const=True,
help="Output SVG for the solutions")
parsed_args = parser.parse_args(args if args is not None else sys.argv[1:])
placement_iter = placer.place(board, components, nets)
if parsed_args.first_only:
placement_iter = [next(placement_iter)]
if not parsed_args.svg:
count = 0
for placement in placement_iter:
placement.print_solution()
print()
count += 1
print("{} solutions".format(count))
else:
if isinstance(parsed_args.svg, str):
with open(parsed_args.svg, "w") as f:
svg.print_svg(placement_iter, file=f)
else:
svg.print_svg(placement_iter, file=sys.stdout)
| Python | 0.000001 |
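The new `--allow-drilled` flag is a plain `store_true` argument threaded through to `placer.place`, and the commit also raises the interpreter's recursion limit, since allowing drilled holes enlarges the search tree the recursive placer explores. A self-contained sketch of the flag handling (the parser here is illustrative):

```python
import argparse
import sys

sys.setrecursionlimit(100000)  # deep recursive search needs headroom

parser = argparse.ArgumentParser(description='Find circuit placements.')
parser.add_argument('--allow-drilled', action='store_true',
                    help='Allow holes to be drilled out')

args = parser.parse_args(['--allow-drilled'])
# argparse maps the dashed flag to an underscored attribute.
assert args.allow_drilled is True
```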
2352ce413cebb9f0fd7b1f26bb33bd0325abedfd | make more pylint friendly | csw.py | csw.py | #!/usr/bin/python -u
# -*- coding: ISO-8859-15 -*-
# =================================================================
#
# $Id$
#
# Authors: Tom Kralidis <tomkralidis@hotmail.com>
#
# Copyright (c) 2010 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
import os
from server import server
CONFIG = 'default.cfg'
if os.environ['QUERY_STRING'].lower().find('config') != -1:
for kvp in os.environ['QUERY_STRING'].split('&'):
if kvp.lower().find('config') != -1:
CONFIG = kvp.split('=')[1]
# get runtime configuration
CSW = server.Csw(CONFIG)
# go!
CSW.dispatch()
| #!/usr/bin/python -u
# -*- coding: ISO-8859-15 -*-
# =================================================================
#
# $Id$
#
# Authors: Tom Kralidis <tomkralidis@hotmail.com>
#
# Copyright (c) 2010 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
import os
from server import server
config = 'default.cfg'
if os.environ['QUERY_STRING'].lower().find('config') != -1:
for kvp in os.environ['QUERY_STRING'].split('&'):
if kvp.lower().find('config') != -1:
config = kvp.split('=')[1]
# get runtime configuration
CSW = server.Csw(config)
# go!
CSW.dispatch()
| Python | 0.000002 |
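The pylint change is purely a naming one: a module-level name that pylint treats as a constant must match its UPPER_CASE const pattern, so `config` becomes `CONFIG` (the existing `CSW` already conformed). In miniature:

```python
# pylint C0103 (invalid-name): module-level "constants" are expected in
# UPPER_CASE, so the lower-case spelling draws a warning.
config = 'default.cfg'   # flagged by pylint
CONFIG = 'default.cfg'   # clean
```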
12c7d473e2a270d46722b936a8fe9b62eb7548f1 | Add test for issue 203 | h5py/_hl/tests/test_slicing.py | h5py/_hl/tests/test_slicing.py | import numpy as np
from .common import ut, TestCase
import h5py
from h5py.highlevel import File
class BaseSlicing(TestCase):
def setUp(self):
self.f = File(self.mktemp(), 'w')
def tearDown(self):
if self.f:
self.f.close()
class TestSingleElement(BaseSlicing):
"""
Feature: Retrieving a single element works with NumPy semantics
"""
def test_single_index(self):
""" Single-element selection with [index] yields array scalar """
dset = self.f.create_dataset('x', (1,), dtype='i1')
out = dset[0]
self.assertIsInstance(out, np.int8)
def test_single_null(self):
""" Single-element selection with [()] yields ndarray """
dset = self.f.create_dataset('x', (1,), dtype='i1')
out = dset[()]
self.assertIsInstance(out, np.ndarray)
self.assertEqual(out.shape, (1,))
def test_scalar_index(self):
""" Slicing with [...] yields scalar ndarray """
dset = self.f.create_dataset('x', shape=(), dtype='f')
out = dset[...]
self.assertIsInstance(out, np.ndarray)
self.assertEqual(out.shape, ())
def test_scalar_null(self):
""" Slicing with [()] yields array scalar """
dset = self.f.create_dataset('x', shape=(), dtype='i1')
out = dset[()]
self.assertIsInstance(out, np.int8)
def test_compound(self):
""" Compound scalar is numpy.void, not tuple (issue 135) """
dt = np.dtype([('a','i4'),('b','f8')])
v = np.ones((4,), dtype=dt)
dset = self.f.create_dataset('foo', (4,), data=v)
self.assertEqual(dset[0], v[0])
self.assertIsInstance(dset[0], np.void)
class TestObjectIndex(BaseSlicing):
"""
Feauture: numpy.object_ subtypes map to real Python objects
"""
def test_reference(self):
""" Indexing a reference dataset returns a h5py.Reference instance """
dset = self.f.create_dataset('x', (1,), dtype=h5py.special_dtype(ref=h5py.Reference))
dset[0] = self.f.ref
self.assertEqual(type(dset[0]), h5py.Reference)
def test_regref(self):
""" Indexing a region reference dataset returns a h5py.RegionReference
"""
dset1 = self.f.create_dataset('x', (10,10))
regref = dset1.regionref[...]
dset2 = self.f.create_dataset('y', (1,), dtype=h5py.special_dtype(ref=h5py.RegionReference))
dset2[0] = regref
self.assertEqual(type(dset2[0]), h5py.RegionReference)
def test_scalar(self):
""" Indexing returns a real Python object on scalar datasets """
dset = self.f.create_dataset('x', (), dtype=h5py.special_dtype(ref=h5py.Reference))
dset[()] = self.f.ref
self.assertEqual(type(dset[()]), h5py.Reference)
def test_bytestr(self):
""" Indexing a byte string dataset returns a real python byte string
"""
dset = self.f.create_dataset('x', (1,), dtype=h5py.special_dtype(vlen=bytes))
dset[0] = b"Hello there!"
self.assertEqual(type(dset[0]), bytes)
class TestSimpleSlicing(TestCase):
"""
Feature: Simple NumPy-style slices (start:stop:step) are supported.
"""
def setUp(self):
self.f = File(self.mktemp(), 'w')
self.arr = np.arange(10)
self.dset = self.f.create_dataset('x', data=self.arr)
def tearDown(self):
if self.f:
self.f.close()
@ut.expectedFailure
def test_negative_stop(self):
""" Negative stop indexes work as they do in NumPy """
self.assertArrayEqual(self.dset[2:-2], self.arr[2:-2])
| import numpy as np
from .common import ut, TestCase
import h5py
from h5py.highlevel import File
class BaseSlicing(TestCase):
def setUp(self):
self.f = File(self.mktemp(), 'w')
def tearDown(self):
if self.f:
self.f.close()
class TestSingleElement(BaseSlicing):
"""
Feature: Retrieving a single element works with NumPy semantics
"""
def test_single_index(self):
""" Single-element selection with [index] yields array scalar """
dset = self.f.create_dataset('x', (1,), dtype='i1')
out = dset[0]
self.assertIsInstance(out, np.int8)
def test_single_null(self):
""" Single-element selection with [()] yields ndarray """
dset = self.f.create_dataset('x', (1,), dtype='i1')
out = dset[()]
self.assertIsInstance(out, np.ndarray)
self.assertEqual(out.shape, (1,))
def test_scalar_index(self):
""" Slicing with [...] yields scalar ndarray """
dset = self.f.create_dataset('x', shape=(), dtype='f')
out = dset[...]
self.assertIsInstance(out, np.ndarray)
self.assertEqual(out.shape, ())
def test_scalar_null(self):
""" Slicing with [()] yields array scalar """
dset = self.f.create_dataset('x', shape=(), dtype='i1')
out = dset[()]
self.assertIsInstance(out, np.int8)
def test_compound(self):
""" Compound scalar is numpy.void, not tuple (issue 135) """
dt = np.dtype([('a','i4'),('b','f8')])
v = np.ones((4,), dtype=dt)
dset = self.f.create_dataset('foo', (4,), data=v)
self.assertEqual(dset[0], v[0])
self.assertIsInstance(dset[0], np.void)
class TestObjectIndex(BaseSlicing):
"""
Feauture: numpy.object_ subtypes map to real Python objects
"""
def test_reference(self):
""" Indexing a reference dataset returns a h5py.Reference instance """
dset = self.f.create_dataset('x', (1,), dtype=h5py.special_dtype(ref=h5py.Reference))
dset[0] = self.f.ref
self.assertEqual(type(dset[0]), h5py.Reference)
def test_regref(self):
""" Indexing a region reference dataset returns a h5py.RegionReference
"""
dset1 = self.f.create_dataset('x', (10,10))
regref = dset1.regionref[...]
dset2 = self.f.create_dataset('y', (1,), dtype=h5py.special_dtype(ref=h5py.RegionReference))
dset2[0] = regref
self.assertEqual(type(dset2[0]), h5py.RegionReference)
def test_scalar(self):
""" Indexing returns a real Python object on scalar datasets """
dset = self.f.create_dataset('x', (), dtype=h5py.special_dtype(ref=h5py.Reference))
dset[()] = self.f.ref
self.assertEqual(type(dset[()]), h5py.Reference)
def test_bytestr(self):
""" Indexing a byte string dataset returns a real python byte string
"""
dset = self.f.create_dataset('x', (1,), dtype=h5py.special_dtype(vlen=bytes))
dset[0] = b"Hello there!"
self.assertEqual(type(dset[0]), bytes)
| Python | 0 |
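The added `TestSimpleSlicing` case records issue 203 as a known bug: `@ut.expectedFailure` marks the negative-stop slice test so the suite reports it as an expected failure instead of a regression until the behaviour is fixed. A standalone sketch of the decorator, using plain `unittest`:

```python
import unittest

class KnownBug(unittest.TestCase):
    @unittest.expectedFailure
    def test_negative_stop(self):
        # Deliberately wrong expectation standing in for the bug: the
        # runner reports this as an expected failure, not an error.
        self.assertEqual([0, 1, 2, 3][2:-2], [2])

if __name__ == '__main__':
    unittest.main()
```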
8c87da20876c6b633988063eac81ff2b0f602dbb | Fix url encoding | ptscrape.py | ptscrape.py | #=======================================================================
# Screen-scraping framework
#=======================================================================
import logging
try:
import bs4 as soup
except ImportError:
import BeautifulSoup as soup
import urllib2
from urllib import urlencode
from urlparse import urljoin
import cookielib
import os
import re
_log = logging.getLogger(__name__)
class PageSource(object):
def __init__(self, cachedir=None, replay=False):
self.cachedir = cachedir
self.replay = replay
self.jar = cookielib.CookieJar()
self.agent = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.jar))
# urllib2.HTTPRedirectHandler())
def get(self, url, query=None, tag=None):
'''HTTP GET request on a URL with optional query'''
if query:
url += '?' + urlencode(query)
_log.info('GET %s', url)
return self._transact(url, tag=tag)
def post(self, url, query=None, tag=None):
'''HTTP POST request on a URL with optional query'''
_log.info('POST %s', url)
data = ''
if query:
data = urlencode(query)
return self._transact(url, data, tag=tag)
def _transact(self, url, data=None, tag=None):
'''Perform an HTTP request, or fetch page from cache'''
if tag is None:
tag = os.path.basename(url)
if self.replay:
content = self.read_cache(tag)
else:
doc = self.agent.open(url, data)
_log.info('info %r', doc.info())
content = doc.read()
if self.cachedir:
self.write_cache(tag, content)
doc = soup.BeautifulSoup(content)
return Page(url, doc)
def read_cache(self, tag):
cachefile = os.path.join(os.path.expanduser(self.cachedir), tag)
with open(cachefile, 'rb') as f:
content = f.read()
return content
def write_cache(self, tag, content):
cachefile = os.path.join(os.path.expanduser(self.cachedir), tag)
with open(cachefile, 'wb') as f:
f.write(content)
class Page(object):
def __init__(self, url, doc):
self.url = url
self.doc = doc
def bs_cdata(tag):
'''Get the character data inside a BeautifulSoup element, ignoring all markup'''
return ''.join(tag.findAll(text=True))
if __name__=='__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('--replay', action='store_true')
ap.add_argument('url')
args = ap.parse_args()
logging.basicConfig(level=logging.INFO)
| #=======================================================================
# Screen-scraping framework
#=======================================================================
import logging
try:
import bs4 as soup
except ImportError:
import BeautifulSoup as soup
import urllib2
from urllib import urlencode
from urlparse import urljoin
import cookielib
import os
import re
_log = logging.getLogger(__name__)
class PageSource(object):
def __init__(self, cachedir=None, replay=False):
self.cachedir = cachedir
self.replay = replay
self.jar = cookielib.CookieJar()
self.agent = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.jar))
# urllib2.HTTPRedirectHandler())
def get(self, url, query=None, tag=None):
'''HTTP GET request on a URL with optional query'''
if query:
url += '?' + query.urlencode()
_log.info('GET %s', url)
return self._transact(url, tag=tag)
def post(self, url, query=None, tag=None):
'''HTTP POST request on a URL with optional query'''
_log.info('POST %s', url)
data = ''
if query:
data = urlencode(query)
return self._transact(url, data, tag=tag)
def _transact(self, url, data=None, tag=None):
'''Perform an HTTP request, or fetch page from cache'''
if tag is None:
tag = os.path.basename(url)
if self.replay:
content = self.read_cache(tag)
else:
doc = self.agent.open(url, data)
_log.info('info %r', doc.info())
content = doc.read()
if self.cachedir:
self.write_cache(tag, content)
doc = soup.BeautifulSoup(content)
return Page(url, doc)
def read_cache(self, tag):
cachefile = os.path.join(os.path.expanduser(self.cachedir), tag)
with open(cachefile, 'rb') as f:
content = f.read()
return content
def write_cache(self, tag, content):
cachefile = os.path.join(os.path.expanduser(self.cachedir), tag)
with open(cachefile, 'wb') as f:
f.write(content)
class Page(object):
def __init__(self, url, doc):
self.url = url
self.doc = doc
def bs_cdata(tag):
'''Get the character data inside a BeautifulSoup element, ignoring all markup'''
return ''.join(tag.findAll(text=True))
if __name__=='__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('--replay', action='store_true')
ap.add_argument('url')
args = ap.parse_args()
logging.basicConfig(level=logging.INFO)
| Python | 0.998718 |
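The bug was in the GET branch: plain query dicts have no `.urlencode()` method (that is a Django `QueryDict` convenience), whereas the module-level `urlencode` function the POST branch already used accepts a dict or a sequence of pairs. A quick illustration on Python 2, matching the row's `urllib`/`urllib2` imports:

```python
from urllib import urlencode  # urllib.parse.urlencode on Python 3

query = {'q': 'example', 'page': 2}
print(urlencode(query))   # e.g. 'q=example&page=2'
# query.urlencode() would raise AttributeError on a plain dict.
```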
51f46c57e209e35063f67055e45ff6e26f8aa552 | Format error on urlfetch.get fail | heat/engine/resources/stack.py | heat/engine/resources/stack.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from requests import exceptions
from heat.common import exception
from heat.common import template_format
from heat.common import urlfetch
from heat.engine.properties import Properties
from heat.engine import stack_resource
from heat.openstack.common import log as logging
logger = logging.getLogger(__name__)
(PROP_TEMPLATE_URL,
PROP_TIMEOUT_MINS,
PROP_PARAMETERS) = ('TemplateURL', 'TimeoutInMinutes', 'Parameters')
class NestedStack(stack_resource.StackResource):
'''
A Resource representing a child stack to allow composition of templates.
'''
properties_schema = {
PROP_TEMPLATE_URL: {
'Type': 'String',
'Required': True,
'Description': _('The URL of a template that specifies the stack'
' to be created as a resource.')},
PROP_TIMEOUT_MINS: {
'Type': 'Number',
'Description': _('The length of time, in minutes, to wait for the'
' nested stack creation.')},
PROP_PARAMETERS: {
'Type': 'Map',
'Description': _('The set of parameters passed to this nested'
' stack.')}}
update_allowed_keys = ('Properties',)
update_allowed_properties = (PROP_TEMPLATE_URL, PROP_TIMEOUT_MINS,
PROP_PARAMETERS)
def handle_create(self):
try:
template_data = urlfetch.get(self.properties[PROP_TEMPLATE_URL])
except (exceptions.RequestException, IOError) as r_exc:
raise ValueError("Could not fetch remote template '%s': %s" %
(self.properties[PROP_TEMPLATE_URL], str(r_exc)))
template = template_format.parse(template_data)
return self.create_with_template(template,
self.properties[PROP_PARAMETERS],
self.properties[PROP_TIMEOUT_MINS])
def handle_delete(self):
return self.delete_nested()
def FnGetAtt(self, key):
if key and not key.startswith('Outputs.'):
raise exception.InvalidTemplateAttribute(resource=self.name,
key=key)
return self.get_output(key.partition('.')[-1])
def FnGetRefId(self):
return self.nested().identifier().arn()
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
# Nested stack template may be changed even if the prop_diff is empty.
self.properties = Properties(self.properties_schema,
json_snippet.get('Properties', {}),
self.stack.resolve_runtime_data,
self.name)
try:
template_data = urlfetch.get(self.properties[PROP_TEMPLATE_URL])
except (exceptions.RequestException, IOError) as r_exc:
raise ValueError("Could not fetch remote template '%s': %s" %
(self.properties[PROP_TEMPLATE_URL], str(r_exc)))
template = template_format.parse(template_data)
return self.update_with_template(template,
self.properties[PROP_PARAMETERS],
self.properties[PROP_TIMEOUT_MINS])
def resource_mapping():
return {
'AWS::CloudFormation::Stack': NestedStack,
}
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common import exception
from heat.common import template_format
from heat.common import urlfetch
from heat.engine.properties import Properties
from heat.engine import stack_resource
from heat.openstack.common import log as logging
logger = logging.getLogger(__name__)
(PROP_TEMPLATE_URL,
PROP_TIMEOUT_MINS,
PROP_PARAMETERS) = ('TemplateURL', 'TimeoutInMinutes', 'Parameters')
class NestedStack(stack_resource.StackResource):
'''
A Resource representing a child stack to allow composition of templates.
'''
properties_schema = {
PROP_TEMPLATE_URL: {
'Type': 'String',
'Required': True,
'Description': _('The URL of a template that specifies the stack'
' to be created as a resource.')},
PROP_TIMEOUT_MINS: {
'Type': 'Number',
'Description': _('The length of time, in minutes, to wait for the'
' nested stack creation.')},
PROP_PARAMETERS: {
'Type': 'Map',
'Description': _('The set of parameters passed to this nested'
' stack.')}}
update_allowed_keys = ('Properties',)
update_allowed_properties = (PROP_TEMPLATE_URL, PROP_TIMEOUT_MINS,
PROP_PARAMETERS)
def handle_create(self):
template_data = urlfetch.get(self.properties[PROP_TEMPLATE_URL])
template = template_format.parse(template_data)
return self.create_with_template(template,
self.properties[PROP_PARAMETERS],
self.properties[PROP_TIMEOUT_MINS])
def handle_delete(self):
return self.delete_nested()
def FnGetAtt(self, key):
if key and not key.startswith('Outputs.'):
raise exception.InvalidTemplateAttribute(resource=self.name,
key=key)
return self.get_output(key.partition('.')[-1])
def FnGetRefId(self):
return self.nested().identifier().arn()
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
# Nested stack template may be changed even if the prop_diff is empty.
self.properties = Properties(self.properties_schema,
json_snippet.get('Properties', {}),
self.stack.resolve_runtime_data,
self.name)
template_data = urlfetch.get(self.properties[PROP_TEMPLATE_URL])
template = template_format.parse(template_data)
return self.update_with_template(template,
self.properties[PROP_PARAMETERS],
self.properties[PROP_TIMEOUT_MINS])
def resource_mapping():
return {
'AWS::CloudFormation::Stack': NestedStack,
}
| Python | 0.000002 |
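This commit turns low-level fetch failures into user-facing errors: `urlfetch.get` is wrapped so `requests.exceptions.RequestException` and `IOError` are re-raised as a `ValueError` whose message names the unreachable template URL. A standalone sketch of the same translation, using `requests` directly in place of Heat's `urlfetch` helper:

```python
import requests
from requests import exceptions

def fetch_template(url):
    # Re-raise transport failures as a validation-style error that
    # names the URL, instead of leaking a raw requests traceback.
    try:
        resp = requests.get(url)
        resp.raise_for_status()
        return resp.text
    except (exceptions.RequestException, IOError) as r_exc:
        raise ValueError("Could not fetch remote template '%s': %s"
                         % (url, str(r_exc)))
```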
22aead72594e5aa7047858c04beb3018e93c59fe | Revert "started 0.2.x" | api/apps.py | api/apps.py | from __future__ import unicode_literals
from django.apps import AppConfig
APP_NAME = 'vsemionov.notes.api'
APP_VERSION = '0.1.0'
class ApiConfig(AppConfig):
name = 'api'
| from __future__ import unicode_literals
from django.apps import AppConfig
APP_NAME = 'vsemionov.notes.api'
APP_VERSION = '0.2'
class ApiConfig(AppConfig):
name = 'api'
| Python | 0 |
492005db9a7c34b2648de8b7335bdbdd18ffb13b | Update setup.py with release version. | py/setup.py | py/setup.py | # Lint as: python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Config file for distributing package via Pypi server."""
import setuptools
# It is assumed that this file will moved to gps_building_block/py/setup.py,
# while the README resides at gps_building_blocks/README.md.
with open("../README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="gps-building-blocks",
version="1.0.0",
author="gPS Team",
author_email="no-reply@google.com",
description="Modules and tools useful for use with advanced data solutions on Google Ads, Google Marketing Platform and Google Cloud.",
long_description=long_description,
long_description_tpye="text/markdown",
url="https://github.com/google/gps_building_blocks",
license="Apache Software License",
packages=setuptools.find_packages(),
install_requires=[
"absl-py==0.9.0",
"google-api-core==1.17.0",
"google-api-python-client==1.9.1",
"google-auth==1.16.0",
"google-cloud-bigquery==1.22.0",
"google-cloud-storage==1.28.1",
"requests==2.23.0",
"dataclasses; python_version<'3.7'"
],
classifiers=[
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet",
"Topic :: Scientific/Engineering",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Operating System :: OS Independent",
],
)
| # Lint as: python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Config file for distributing package via Pypi server."""
import setuptools
# It is assumed that this file will moved to gps_building_block/py/setup.py,
# while the README resides at gps_building_blocks/README.md.
with open("../README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="gps-building-blocks",
version="0.1.12",
author="gPS Team",
author_email="no-reply@google.com",
description="Modules and tools useful for use with advanced data solutions on Google Ads, Google Marketing Platform and Google Cloud.",
long_description=long_description,
long_description_tpye="text/markdown",
url="https://github.com/google/gps_building_blocks",
license="Apache Software License",
packages=setuptools.find_packages(),
install_requires=[
"absl-py==0.9.0",
"google-api-core==1.17.0",
"google-api-python-client==1.9.1",
"google-auth==1.16.0",
"google-cloud-bigquery==1.22.0",
"google-cloud-storage==1.28.1",
"requests==2.23.0",
"dataclasses; python_version<'3.7'"
],
classifiers=[
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet",
"Topic :: Scientific/Engineering",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Operating System :: OS Independent",
],
)
| Python | 0 |
71b93971486ac4bf80284de43962d4704642a890 | add missing _ on line 37 | riskroll.py | riskroll.py | from sys import exit
from app.RollDice import roll
def get_number_of_combatants():
"""Take no input and return tuple of ints."""
num_of_attackers = [1,2,3]
num_of_defenders = [1,2]
attackers = 0
defenders = 0
while attackers not in num_of_attackers:
attackers = int(raw_input('How many attackers? [1,2,3]\n>'))
while defenders not in num_of_defenders:
defenders = int(raw_input('How many defenders? [1,2]\n>'))
return (attackers, defenders)
def fight(combatants):
"""Input tuple of ints and return tuple of lists of ints."""
attackers = combatants[0]
defenders = combatants[1]
attack_rolls = []
defence_rolls = []
attack_rolls = roll.main((attackers, 6))
defence_rolls = roll.main((defenders, 6))
return (attack_rolls, defence_rolls)
def divine_winner(attack_rolls, defence_rolls):
"""Take two lists of ints and return tuple."""
attack_rolls.sort()
defence_rolls.sort()
attack_wins = 0
attack_losses = 0
defence_wins = 0
defence_losses = 0
for i in xrange(len(defence_rolls), 0, -1):
if defence_rolls[i] >= attack_rolls[i]:
defence_wins = defence_wins + 1
attack_losses = attack_losses + 1
else:
attack_wins = attack_wins + 1
defence_losses = defence_losses + 1
attack_wl = (attack_wins,attack_losses)
defence_wl = (defence_wins,defence_losses)
return (attack_wl, defence_wl)
def print_results(attack_rolls, defence_rolls, attack_wl, defence_wl):
print 'Attacker rolls %r' % (attack_rolls)
print 'Defender rolls %r' % (defence_rolls)
print '\n'
print 'Attacker wins %d and loses %d' % (attack_wl[0], attack_wl[1])
print 'Defender wins %d and loses %d' % (defence_wl[0], defence_wl[1])
print '\n'
def restart():
"""Determine if another go is needed."""
options = ['s', 'd', 'x']
while again not in options:
again = raw_input('Roll the [s]ame, [d]ifferent, or e[x]it...\n>')
if again == 's':
return True
elif again == 'd':
return False
else:
exit()
if __name__ == '__main__':
repeat = False
while True:
if repeat == False:
num_combatants = get_number_of_combatants()
attack_rolls, defence_rolls = fight(num_combatants)
attack_wl, defence_wl = divine_winner(attack_rolls, defence_rolls)
print_results(attack_rolls, defence_rolls, attack_wl, defence_wl)
repeat = restart()
| from sys import exit
from app.RollDice import roll
def get_number_of_combatants():
"""Take no input and return tuple of ints."""
num_of_attackers = [1,2,3]
num_of_defenders = [1,2]
attackers = 0
defenders = 0
while attackers not in num_of_attackers:
attackers = int(raw_input('How many attackers? [1,2,3]\n>'))
while defenders not in num_of_defenders:
defenders = int(raw_input('How many defenders? [1,2]\n>'))
return (attackers, defenders)
def fight(combatants):
"""Input tuple of ints and return tuple of lists of ints."""
attackers = combatants[0]
defenders = combatants[1]
attack_rolls = []
defence_rolls = []
attack_rolls = roll.main((attackers, 6))
defence_rolls = roll.main((defenders, 6))
return (attack_rolls, defence_rolls)
def divine_winner(attack_rolls, defence_rolls):
"""Take two lists of ints and return tuple."""
attackrolls.sort()
defence_rolls.sort()
attack_wins = 0
attack_losses = 0
defence_wins = 0
defence_losses = 0
for i in xrange(len(defence_rolls), 0, -1):
if defence_rolls[i] >= attack_rolls[i]:
defence_wins = defence_wins + 1
attack_losses = attack_losses + 1
else:
attack_wins = attack_wins + 1
defence_losses = defence_losses + 1
attack_wl = (attack_wins,attack_losses)
defence_wl = (defence_wins,defence_losses)
return (attack_wl, defence_wl)
def print_results(attack_rolls, defence_rolls, attack_wl, defence_wl):
print 'Attacker rolls %r' % (attack_rolls)
print 'Defender rolls %r' % (defence_rolls)
print '\n'
print 'Attacker wins %d and loses %d' % (attack_wl[0], attack_wl[1])
print 'Defender wins %d and loses %d' % (defence_wl[0], defence_wl[1])
print '\n'
def restart():
"""Determine if another go is needed."""
options = ['s', 'd', 'x']
while again not in options:
again = raw_input('Roll the [s]ame, [d]ifferent, or e[x]it...\n>')
if again == 's':
return True
elif again == 'd':
return False
else:
exit()
if __name__ == '__main__':
repeat = False
while True:
if repeat == False:
num_combatants = get_number_of_combatants()
attack_rolls, defence_rolls = fight(num_combatants)
attack_wl, defence_wl = divine_winner(attack_rolls, defence_rolls)
print_results(attack_rolls, defence_rolls, attack_wl, defence_wl)
repeat = restart()
| Python | 0.999982 |
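
The record above pairs sorted attack and defence rolls by index. A minimal, self-contained sketch of the same Risk-style comparison follows; the function name and return layout are my own for illustration, not taken from the repository. Sorting both sides descending and zipping them sidesteps the index arithmetic entirely:

```python
def score_rolls(attack_rolls, defence_rolls):
    """Pair the highest dice from each side; ties go to the defender."""
    attack = sorted(attack_rolls, reverse=True)
    defence = sorted(defence_rolls, reverse=True)
    attack_wins = defence_wins = 0
    # zip() stops at the shorter list, so only min(len) pairs are contested.
    for a, d in zip(attack, defence):
        if d >= a:
            defence_wins += 1
        else:
            attack_wins += 1
    return attack_wins, defence_wins

assert score_rolls([6, 3, 2], [6, 1]) == (1, 1)  # 6v6 to defender, 3v1 to attacker
assert score_rolls([4], [5, 2]) == (0, 1)        # only one pair is compared
```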
d31d767ec4c4452e8a1d5f9dd896ade19e4ac645 | Fix tests | run_test.py | run_test.py | import asynctwitch as at
class Bot(at.CommandBot, at.RankedBot):
pass
bot = Bot(
user='justinfan100' # read-only client
)
@bot.command("test", desc="Some test command")
async def test(m, arg1:int):
pass
bot.add_rank("test rank", points=10)
@bot.override
async def raw_event(data):
print(data)
@bot.override
async def event_roomstate(channel, tags):
bot.stop(exit=True)
print('Failed to exit!')
bot.start()
| import asynctwitch as at
class Bot(at.CommandBot, at.RankedBot):
pass
bot = Bot(
user='justinfan100' # read-only client
)
@bot.command("test", desc="Some test command")
async def test(m, arg1:int):
pass
bot.add_rank("test rank", points=10)
@bot.override
async def raw_event(data):
print(data)
@bot.override
async def event_roomstate(tags):
bot.stop(exit=True)
print('Failed to exit!')
bot.start()
| Python | 0.000003 |
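
The whole diff in the record above is the arity of the overridden `event_roomstate` handler (`(tags)` before, `(channel, tags)` after). A toy dispatcher — not asynctwitch itself, all names here are made up — shows why an override must match the argument list the library calls it with:

```python
def dispatch(handler):
    channel, tags = '#somechannel', {'room-id': '123'}
    return handler(channel, tags)  # the library always passes two arguments

def good_handler(channel, tags):   # matches the dispatch call site
    return (channel, tags)

def bad_handler(tags):             # one parameter: mirrors the pre-fix signature
    return tags

dispatch(good_handler)
try:
    dispatch(bad_handler)
except TypeError as exc:
    print('signature mismatch:', exc)
```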
38a086d2c5ebf73f7ad0108def2304262a2e0452 | Add trailing comma | runtests.py | runtests.py | #!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
DEFAULT_SETTINGS = dict(
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"pinax.likes",
"pinax.likes.tests"
],
MIDDLEWARE_CLASSES=[],
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
}
},
SITE_ID=1,
ROOT_URLCONF="pinax.likes.tests.urls",
SECRET_KEY="notasecret",
PINAX_LIKES_LIKABLE_MODELS={
"auth.User": {
"like_text_on": "unlike",
"css_class_on": "fa-heart",
"like_text_off": "like",
"css_class_off": "fa-heart-o",
"allowed": lambda user, obj: True
},
"tests.Demo": {
"like_text_on": "unlike",
"css_class_on": "fa-heart",
"like_text_off": "like",
"css_class_off": "fa-heart-o"
}
},
AUTHENTICATION_BACKENDS=[
"pinax.likes.auth_backends.CanLikeBackend"
],
)
def runtests(*test_args):
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
test_args = ["pinax.likes.tests"]
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
test_args = ["tests"]
failures = runner_class(verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests(*sys.argv[1:])
| #!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
DEFAULT_SETTINGS = dict(
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"pinax.likes",
"pinax.likes.tests"
],
MIDDLEWARE_CLASSES=[],
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
}
},
SITE_ID=1,
ROOT_URLCONF="pinax.likes.tests.urls",
SECRET_KEY="notasecret",
PINAX_LIKES_LIKABLE_MODELS={
"auth.User": {
"like_text_on": "unlike",
"css_class_on": "fa-heart",
"like_text_off": "like",
"css_class_off": "fa-heart-o",
"allowed": lambda user, obj: True
},
"tests.Demo": {
"like_text_on": "unlike",
"css_class_on": "fa-heart",
"like_text_off": "like",
"css_class_off": "fa-heart-o"
}
},
AUTHENTICATION_BACKENDS=[
"pinax.likes.auth_backends.CanLikeBackend"
]
)
def runtests(*test_args):
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
test_args = ["pinax.likes.tests"]
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
test_args = ["tests"]
failures = runner_class(verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests(*sys.argv[1:])
| Python | 0.999944 |
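
The diff above only appends a comma after the `AUTHENTICATION_BACKENDS` list. A quick demonstration of why consistent trailing commas are worth the habit: a comma forgotten between two string items silently concatenates the neighbours instead of raising an error. (The backend names are just sample strings here.)

```python
backends = [
    'pinax.likes.auth_backends.CanLikeBackend'   # <- comma forgotten
    'django.contrib.auth.backends.ModelBackend',
]
assert len(backends) == 1  # one fused string, not two entries

backends_ok = [
    'pinax.likes.auth_backends.CanLikeBackend',
    'django.contrib.auth.backends.ModelBackend',  # trailing comma: safe to extend
]
assert len(backends_ok) == 2
```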
3cc083e08a586e61a8e89a549ba63c6bc5ede2bb | Add :mod:`firmant.writers.staticrst` to tests | runtests.py | runtests.py | #!/usr/bin/python
import gettext
import unittest
import doctest
import sys
from optparse import OptionParser
from minimock import Mock
from pprint import pprint
from pysettings.modules import get_module
gettext.install('firmant')
def safe_displayhook(s):
if s is not None:
sys.stdout.write('%r\n' % s)
sys.displayhook = safe_displayhook
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.application',
'firmant.chunks',
'firmant.du',
'firmant.paginate',
'firmant.parsers',
'firmant.parsers.feeds',
'firmant.parsers.posts',
'firmant.parsers.tags',
'firmant.parsers.static',
'firmant.routing',
'firmant.routing.components',
'firmant.utils',
'firmant.utils.exceptions',
'firmant.utils.paths',
'firmant.writers',
'firmant.writers.feeds',
'firmant.writers.posts',
'firmant.writers.static',
'firmant.writers.staticrst',
'firmant.writers.j2'
]
if len(sys.argv[1:]) > 0:
modules = sys.argv[1:]
for module in modules:
mod = get_module(module)
args = {}
extraglobs = {'Mock': Mock
,'pprint': pprint
}
for arg, attr in [('module_relative', '_module_relative')
,('package', '_package')
,('setUp', '_setup')
,('tearDown', '_teardown')
,('globs', '_globs')
,('optionflags', '_optionflags')
,('parser', '_parser')
,('encoding', '_encoding')
]:
if hasattr(mod, attr):
args[arg] = getattr(mod, attr)
extraglobs.update(args.get('extraglobs', dict()))
args['extraglobs'] = extraglobs
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
| #!/usr/bin/python
import gettext
import unittest
import doctest
import sys
from optparse import OptionParser
from minimock import Mock
from pprint import pprint
from pysettings.modules import get_module
gettext.install('firmant')
def safe_displayhook(s):
if s is not None:
sys.stdout.write('%r\n' % s)
sys.displayhook = safe_displayhook
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.application',
'firmant.chunks',
'firmant.du',
'firmant.paginate',
'firmant.parsers',
'firmant.parsers.feeds',
'firmant.parsers.posts',
'firmant.parsers.tags',
'firmant.parsers.static',
'firmant.routing',
'firmant.routing.components',
'firmant.utils',
'firmant.utils.exceptions',
'firmant.utils.paths',
'firmant.writers',
'firmant.writers.feeds',
'firmant.writers.posts',
'firmant.writers.static',
'firmant.writers.j2'
]
if len(sys.argv[1:]) > 0:
modules = sys.argv[1:]
for module in modules:
mod = get_module(module)
args = {}
extraglobs = {'Mock': Mock
,'pprint': pprint
}
for arg, attr in [('module_relative', '_module_relative')
,('package', '_package')
,('setUp', '_setup')
,('tearDown', '_teardown')
,('globs', '_globs')
,('optionflags', '_optionflags')
,('parser', '_parser')
,('encoding', '_encoding')
]:
if hasattr(mod, attr):
args[arg] = getattr(mod, attr)
extraglobs.update(args.get('extraglobs', dict()))
args['extraglobs'] = extraglobs
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
| Python | 0 |
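
The runner above wraps each module's doctests in a unittest suite via `doctest.DocTestSuite`. A single-function analogue using `run_docstring_examples`, the lightweight cousin of `DocTestSuite`, shows the same mechanism in isolation (the `add` function is invented for the sketch):

```python
import doctest

def add(a, b):
    """
    >>> add(2, 2)
    4
    """
    return a + b

# Execute just this function's examples; DocTestSuite does the same for a
# whole module and reports each example as a unittest case.
doctest.run_docstring_examples(add, {'add': add}, verbose=True)
```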
98109c41048bb8330348cd0ab51a175328b056d6 | make runtests executable | runtests.py | runtests.py | #!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS=(
## if you use auth.User:
#'django.contrib.auth',
## if you use contenttypes
# 'django.contrib.contenttypes',
'my_app',
'tests',
),
TEST_RUNNER='django_nose.NoseTestSuiteRunner',
# etc
)
def runtests():
argv = sys.argv[:1] + ['test', 'tests']
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
| #!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS=(
## if you use auth.User:
#'django.contrib.auth',
## if you use contenttypes
# 'django.contrib.contenttypes',
'my_app',
'tests',
),
TEST_RUNNER='django_nose.NoseTestSuiteRunner',
# etc
)
def runtests():
argv = sys.argv[:1] + ['test', 'tests']
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
| Python | 0.000003 |
8830fece0992a6e1360440b51956c6ae6a4b034a | Add `SECRET_KEY` to django config | runtests.py | runtests.py | #!/usr/bin/env python
import sys
from os.path import abspath, dirname
import django
from django.conf import settings
from django.utils.crypto import get_random_string
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
SECRET_KEY=get_random_string(),
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND='django.core.mail.backends.locmem.EmailBackend',
MIDDLEWARE=[
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
],
ROOT_URLCONF='email_log.tests.urls',
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
"context_processors": [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
]
}
},
],
)
def runtests():
if hasattr(django, 'setup'):
django.setup()
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
test_args = ['email_log.tests']
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
test_args = ['tests']
failures = runner_class(failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests()
| #!/usr/bin/env python
import sys
from os.path import abspath, dirname
import django
from django.conf import settings
import django
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
MIDDLEWARE=[
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
],
ROOT_URLCONF='email_log.tests.urls',
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
"context_processors": [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
]
}
},
],
)
def runtests():
if hasattr(django, 'setup'):
django.setup()
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
test_args = ['email_log.tests']
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
test_args = ['tests']
failures = runner_class(failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests()
| Python | 0.000009 |
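
The fix above seeds `SECRET_KEY` from `get_random_string()`. One caveat worth noting: on recent Django releases `get_random_string` requires an explicit length (the zero-argument call in the record only works on older versions), so a version-proof sketch of the same idea is:

```python
from django.utils.crypto import get_random_string

# 50 characters matches the classic `startproject` key length; passing the
# length explicitly works on both old and new Django.
SECRET_KEY = get_random_string(50)
```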
d9caaf949e9fe59a656c5986180038b9b3dc34fe | remove league alias | bot/module/commands/command_processor.py | bot/module/commands/command_processor.py | import logging
from bot.module.commands.calendar.calendar_processor import CalendarProcessor
from bot.module.commands.info.info_processor import InfoProcessor
from bot.module.commands.crs.crs_processor import CrsProcessor
from bot.module.commands.wiki.wiki_processor import WikiProcessor
class CommandProcessor(InfoProcessor, CalendarProcessor, CrsProcessor, WikiProcessor):
"""Class processing all commands sent into the chat.
Attributes:
grenouille_bot: master class with all modules.
commands: list of all commands managed by the command processor.
"""
def __init__(self, grenouille_bot):
"""Define all commands the bot will process.
Args:
grenouille_bot: master class with all modules.
"""
InfoProcessor.__init__(self)
CalendarProcessor.__init__(self)
CrsProcessor.__init__(self)
WikiProcessor.__init__(self)
self.grenouille_bot = grenouille_bot
self.commands = [{
'aliases': ['grenouille', 'help', 'aide'],
'command': self.help
}, {
'aliases': ['motd', 'mdj'],
'command': self.motd
}, {
'aliases': ['who', 'qui'],
'command': self.who
}, {
'aliases': ['youtube', 'y'],
'command': self.youtube
}, {
'aliases': ['instagram', 'i'],
'command': self.instagram
}, {
'aliases': ['twitter', 't'],
'command': self.twitter
}, {
'aliases': ['now'],
'command': self.now
}, {
'aliases': ['next'],
'command': self.next
}, {
'aliases': ['update', 'u'],
'command': self.update
}, {
'aliases': ['toolmix'],
'command': self.toolmix
}, {
'aliases': ['ligue', 'ftvleague', 'ftvligue'],
'command': self.league
}, {
'aliases': ['wiki'],
'command': self.wiki
}]
def process(self, command_line, sender, is_admin):
"""Process a command.
Args:
command_line: Full command line without the ! stating a command.
sender: String sender of the command.
is_admin: Boolean representing user rights.
"""
command_split = command_line.split(' ', maxsplit=1)
command = self.find_command(command_split[0])
if command is None:
return
if len(command_split) == 1 or command_split[1] == '':
param_line = None
else:
param_line = command_split[1]
# Call the command
command(param_line=param_line, sender=sender, is_admin=is_admin)
def find_command(self, name):
"""Find if asked command exists and returns it.
Args:
name: Name of the command object to find.
Returns:
The command method responsible to process the command, or None if
no object is able to process it.
"""
for command in self.commands:
if name in command['aliases']:
return command['command']
return None
| import logging
from bot.module.commands.calendar.calendar_processor import CalendarProcessor
from bot.module.commands.info.info_processor import InfoProcessor
from bot.module.commands.crs.crs_processor import CrsProcessor
from bot.module.commands.wiki.wiki_processor import WikiProcessor
class CommandProcessor(InfoProcessor, CalendarProcessor, CrsProcessor, WikiProcessor):
"""Class processing all commands sent into the chat.
Attributes:
grenouille_bot: master class with all modules.
commands: list of all commands managed by the command processor.
"""
def __init__(self, grenouille_bot):
"""Define all commands the bot will process.
Args:
grenouille_bot: master class with all modules.
"""
InfoProcessor.__init__(self)
CalendarProcessor.__init__(self)
CrsProcessor.__init__(self)
WikiProcessor.__init__(self)
self.grenouille_bot = grenouille_bot
self.commands = [{
'aliases': ['grenouille', 'help', 'aide'],
'command': self.help
}, {
'aliases': ['motd', 'mdj'],
'command': self.motd
}, {
'aliases': ['who', 'qui'],
'command': self.who
}, {
'aliases': ['youtube', 'y'],
'command': self.youtube
}, {
'aliases': ['instagram', 'i'],
'command': self.instagram
}, {
'aliases': ['twitter', 't'],
'command': self.twitter
}, {
'aliases': ['now'],
'command': self.now
}, {
'aliases': ['next'],
'command': self.next
}, {
'aliases': ['update', 'u'],
'command': self.update
}, {
'aliases': ['toolmix'],
'command': self.toolmix
}, {
'aliases': ['league', 'ligue', 'ftvleague', 'ftvligue'],
'command': self.league
}, {
'aliases': ['wiki'],
'command': self.wiki
}]
def process(self, command_line, sender, is_admin):
"""Process a command.
Args:
command_line: Full command line without the ! stating a command.
sender: String sender of the command.
is_admin: Boolean representing user rights.
"""
command_split = command_line.split(' ', maxsplit=1)
command = self.find_command(command_split[0])
if command is None:
return
if len(command_split) == 1 or command_split[1] == '':
param_line = None
else:
param_line = command_split[1]
# Call the command
command(param_line=param_line, sender=sender, is_admin=is_admin)
def find_command(self, name):
"""Find if asked command exists and returns it.
Args:
name: Name of the command object to find.
Returns:
The command method responsible to process the command, or None if
no object is able to process it.
"""
for command in self.commands:
if name in command['aliases']:
return command['command']
return None
| Python | 0.000395 |
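
`find_command` in the record above scans the alias groups linearly on every lookup. A hedged alternative — my sketch, not the bot's code — flattens the aliases into one dict at startup so each lookup becomes a single hash access, with the same `None` fallback:

```python
def build_alias_table(commands):
    table = {}
    for entry in commands:
        for alias in entry['aliases']:
            table[alias] = entry['command']
    return table

commands = [
    {'aliases': ['help', 'aide'], 'command': lambda: 'help text'},
    {'aliases': ['now'], 'command': lambda: 'current show'},
]
table = build_alias_table(commands)
assert table['aide']() == 'help text'
assert table.get('missing') is None  # same None fallback as the original
```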
cc626bef4bb9ad4888362476a3ce9f92154f7d53 | Resolve #74 -- Use result.get instead of ready | health_check/contrib/celery/plugin_health_check.py | health_check/contrib/celery/plugin_health_check.py | # -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from django.conf import settings
from health_check.backends.base import (
BaseHealthCheckBackend, ServiceUnavailable,
ServiceReturnedUnexpectedResult)
from health_check.plugins import plugin_dir
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=datetime.now() + timedelta(seconds=timeout)
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery return wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
plugin_dir.register(CeleryHealthCheck)
| # -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from time import sleep
from django.conf import settings
from health_check.backends.base import (
BaseHealthCheckBackend, ServiceUnavailable
)
from health_check.plugins import plugin_dir
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=datetime.now() + timedelta(seconds=timeout)
)
now = datetime.now()
while (now + timedelta(seconds=3)) > datetime.now():
print(" checking....")
if result.ready():
try:
result.forget()
except NotImplementedError:
pass
return True
sleep(0.5)
except IOError:
raise ServiceUnavailable("IOError")
except:
raise ServiceUnavailable("Unknown error")
raise ServiceUnavailable("Unknown error")
plugin_dir.register(CeleryHealthCheck)
| Python | 0 |
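
The commit above replaces a hand-rolled `ready()`/`sleep()` poll loop with a blocking `result.get(timeout=...)`, which raises on its own when the worker is slow or failing. The core of the new approach, shown standalone (a sketch: `add` stands in for the project's health-check task and needs a configured Celery app to actually run):

```python
from datetime import datetime, timedelta

def check_celery(add, timeout=3):
    result = add.apply_async(
        args=[4, 4],
        expires=datetime.now() + timedelta(seconds=timeout),  # drop stale tasks
    )
    # get() blocks up to `timeout` seconds and raises TimeoutError or the
    # task's own exception, so no manual polling state is needed.
    return result.get(timeout=timeout) == 8
```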
ac6302f506299ed881ad4971ec30367e083c9433 | remove unneeded lower() call on repo name in require.rpm.repo(), as we're doing it earlier in the method | fabtools/require/rpm.py | fabtools/require/rpm.py | """
Rpm packages
===============
This module provides high-level tools for managing CentOS/RHEL/SL packages
and repositories.
"""
from __future__ import with_statement
from fabtools.system import get_arch
from fabtools.rpm import *
def package(pkg_name, repos=None, yes=None, options=None):
"""
Require a rpm package to be installed.
Example::
from fabtools import require
require.rpm.package('emacs')
"""
if not is_installed(pkg_name):
install(pkg_name, repos, yes, options)
def packages(pkg_list, repos=None, yes=None, options=None):
"""
Require several rpm packages to be installed.
Example::
from fabtools import require
require.rpm.packages([
'nano',
'unzip',
'vim',
])
"""
pkg_list = [pkg for pkg in pkg_list if not is_installed(pkg)]
if pkg_list:
install(pkg_list, repos, yes, options)
def nopackage(pkg_name, options=None):
"""
Require a rpm package to be uninstalled.
Example::
from fabtools import require
require.rpm.nopackage('emacs')
"""
if is_installed(pkg_name):
uninstall(pkg_name, options)
def nopackages(pkg_list, options=None):
"""
Require several rpm packages to be uninstalled.
Example::
from fabtools import require
require.rpm.nopackages([
'unzip',
'vim',
'emacs',
])
"""
pkg_list = [pkg for pkg in pkg_list if is_installed(pkg)]
if pkg_list:
uninstall(pkg_list, options)
def repository(name):
"""
Require a repository. Aimed for 3rd party repositories.
*Name* currently only supports EPEL and RPMforge.
Example::
from fabtools import require
# RPMforge packages for CentOS 6
require.rpm.repository('rpmforge')
"""
name = name.lower()
epel_url = 'http://download.fedoraproject.org/pub/epel'
rpmforge_url = 'http://packages.sw.be/rpmforge-release/rpmforge-release'
rpmforge_version = '0.5.2-2'
arch = get_arch()
try:
release = int(str(distrib_release()))
except ValueError:
release = int(float(str(distrib_release())))
if release == 6:
epel_version = '6-8'
elif release == 5:
epel_version = '5-4'
if name == 'rpmforge' and arch == 'i386':
arch = 'i686'
    supported = {
        'rpmforge': {'%(arch)s' % locals(): {
            '6': '%(rpmforge_url)s-%(rpmforge_version)s.el6.rf.i686.rpm' % locals(),
            '5': '%(rpmforge_url)s-%(rpmforge_version)s.el5.rf.x86_64.rpm' % locals()}},
        'epel': {'%(arch)s' % locals(): {
            '6': '%(epel_url)s/6/%(arch)s/epel-release-%(epel_version)s.noarch.rpm' % locals(),
            '5': '%(epel_url)s/5/%(arch)s/epel-release-%(epel_version)s.noarch.rpm' % locals()}}
    }
keys = {
'rpmforge': 'http://apt.sw.be/RPM-GPG-KEY.dag.txt',
'epel': '%(epel_url)s/RPM-GPG-KEY-EPEL-%(release)s' % locals()
}
repo = supported[name][str(arch)][str(release)]
key = keys[name]
with settings(hide('warnings'), warn_only=True):
sudo('rpm --import %(key)s' % locals())
sudo('rpm -Uh %(repo)s' % locals())
| """
Rpm packages
===============
This module provides high-level tools for managing CentOS/RHEL/SL packages
and repositories.
"""
from __future__ import with_statement
from fabtools.system import get_arch
from fabtools.rpm import *
def package(pkg_name, repos=None, yes=None, options=None):
"""
Require a rpm package to be installed.
Example::
from fabtools import require
require.rpm.package('emacs')
"""
if not is_installed(pkg_name):
install(pkg_name, repos, yes, options)
def packages(pkg_list, repos=None, yes=None, options=None):
"""
Require several rpm packages to be installed.
Example::
from fabtools import require
require.rpm.packages([
'nano',
'unzip',
'vim',
])
"""
pkg_list = [pkg for pkg in pkg_list if not is_installed(pkg)]
if pkg_list:
install(pkg_list, repos, yes, options)
def nopackage(pkg_name, options=None):
"""
Require a rpm package to be uninstalled.
Example::
from fabtools import require
require.rpm.nopackage('emacs')
"""
if is_installed(pkg_name):
uninstall(pkg_name, options)
def nopackages(pkg_list, options=None):
"""
Require several rpm packages to be uninstalled.
Example::
from fabtools import require
require.rpm.nopackages([
'unzip',
'vim',
'emacs',
])
"""
pkg_list = [pkg for pkg in pkg_list if is_installed(pkg)]
if pkg_list:
uninstall(pkg_list, options)
def repository(name):
"""
Require a repository. Aimed for 3rd party repositories.
*Name* currently only supports EPEL and RPMforge.
Example::
from fabtools import require
# RPMforge packages for CentOS 6
require.rpm.repository('rpmforge')
"""
name = name.lower()
epel_url = 'http://download.fedoraproject.org/pub/epel'
rpmforge_url = 'http://packages.sw.be/rpmforge-release/rpmforge-release'
rpmforge_version = '0.5.2-2'
arch = get_arch()
try:
release = int(str(distrib_release()))
except ValueError:
release = int(float(str(distrib_release())))
if release == 6:
epel_version = '6-8'
elif release == 5:
epel_version = '5-4'
if name.lower() == 'rpmforge' and arch == 'i386':
arch = 'i686'
    supported = {
        'rpmforge': {'%(arch)s' % locals(): {
            '6': '%(rpmforge_url)s-%(rpmforge_version)s.el6.rf.i686.rpm' % locals(),
            '5': '%(rpmforge_url)s-%(rpmforge_version)s.el5.rf.x86_64.rpm' % locals()}},
        'epel': {'%(arch)s' % locals(): {
            '6': '%(epel_url)s/6/%(arch)s/epel-release-%(epel_version)s.noarch.rpm' % locals(),
            '5': '%(epel_url)s/5/%(arch)s/epel-release-%(epel_version)s.noarch.rpm' % locals()}}
    }
keys = {
'rpmforge': 'http://apt.sw.be/RPM-GPG-KEY.dag.txt',
'epel': '%(epel_url)s/RPM-GPG-KEY-EPEL-%(release)s' % locals()
}
repo = supported[name][str(arch)][str(release)]
key = keys[name]
with settings(hide('warnings'), warn_only=True):
sudo('rpm --import %(key)s' % locals())
sudo('rpm -Uh %(repo)s' % locals())
| Python | 0 |
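
The `supported` mapping in the record above had mismatched braces (the `'epel'` key ended up nested inside the `'rpmforge'` value), corrected in place here. A hedged sketch — not fabtools code — of the same name/arch/release lookup that also fails fast with a readable message instead of a bare `KeyError` when the combination is unsupported:

```python
def lookup_repo(supported, name, arch, release):
    try:
        return supported[name][str(arch)][str(release)]
    except KeyError:
        raise ValueError(
            'unsupported repository: name=%r arch=%r release=%r'
            % (name, arch, release))

# The URL below is a placeholder, not a real package location.
supported = {'epel': {'x86_64': {'6': 'http://example.invalid/epel6.rpm'}}}
print(lookup_repo(supported, 'epel', 'x86_64', 6))
```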
f641c4be6e88aac1e1968ca8f07c5294d4dfe6fa | Bump version | facturapdf/__about__.py | facturapdf/__about__.py | __title__ = 'facturapdf'
__summary__ = 'Create PDF invoice according to Spanish regulations.'
__version__ = '0.0.3'
__license__ = 'BSD 3-Clause License'
__uri__ = 'https://github.com/initios/factura-pdf'
__author__ = 'Carlos Goce'
__email__ = 'cgonzalez@initios.com'
| __title__ = 'facturapdf'
__summary__ = 'Create PDF invoice according to Spanish regulations.'
__version__ = '0.0.2'
__license__ = 'BSD 3-Clause License'
__uri__ = 'https://github.com/initios/factura-pdf'
__author__ = 'Carlos Goce'
__email__ = 'cgonzalez@initios.com'
| Python | 0 |
1e3199618f55be86fa5e4259d1c6e4a7074e57ca | Update environment.py | features/environment.py | features/environment.py | """
Copyright 2017 Raul Alvarez
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import Selenium
def before_all(context):
runner = Selenium.Selenium()
context.runner = runner
def after_all(context):
context.runner.quit()
| """
Copyright 2017 Raul Alvarez
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""import Selenium
def before_all(context):
runner = Selenium.Selenium()
context.runner = runner
def after_all(context):
context.runner.quit()
| Python | 0.000001 |
44a41555d4f2ec3eed090711f34b233085e1aebf | add missing config entries | feedservice/settings.py | feedservice/settings.py | # -*- coding: utf-8 -*-
import os, os.path
def bool_env(val, default):
"""Replaces string based environment values with Python booleans"""
if not val in os.environ:
return default
return True if os.environ.get(val) == 'True' else False
DEBUG = bool_env('MYGPOFS_DEBUG', True)
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Stefan Kögl', 'stefan@skoegl.net'),
)
MANAGERS = ADMINS
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'UTC'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Static asset configuration
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.join(BASE_DIR, '../htdocs')
STATIC_ROOT = 'static'
STATIC_URL = '/media/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'media'),
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'm6jkg5lzard@k^p(wui4gtx_zu4s=26c+c0bk+k1xsik6+derf'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ROOT_URLCONF = 'feedservice.urls'
WSGI_APPLICATION = 'feedservice.wsgi.application'
TEMPLATE_DIRS = (
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'feedservice.parse',
'feedservice.urlstore',
'feedservice.webservice',
)
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
BASE_URL='http://localhost:8080/'
import dj_database_url
DATABASES = {'default': dj_database_url.config()}
SOUNDCLOUD_CONSUMER_KEY = os.getenv('MYGPOFS_SOUNDCLOUD_CONSUMER_KEY', '')
FLATTR_THING = ''
ALLOWED_HOSTS = filter(None, os.getenv('MYGPOFS_ALLOWED_HOSTS', '').split(';'))
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
try:
from settings_prod import *
except ImportError, e:
import sys
print >> sys.stderr, 'create settings_prod.py with your customized settings'
| # -*- coding: utf-8 -*-
import os, os.path
def bool_env(val, default):
"""Replaces string based environment values with Python booleans"""
if not val in os.environ:
return default
return True if os.environ.get(val) == 'True' else False
DEBUG = bool_env('MYGPOFS_DEBUG', True)
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Stefan Kögl', 'stefan@skoegl.net'),
)
MANAGERS = ADMINS
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'UTC'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Static asset configuration
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.join(BASE_DIR, '../htdocs')
STATIC_ROOT = 'static'
STATIC_URL = '/media/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'media'),
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'm6jkg5lzard@k^p(wui4gtx_zu4s=26c+c0bk+k1xsik6+derf'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ROOT_URLCONF = 'feedservice.urls'
TEMPLATE_DIRS = (
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'feedservice.parse',
'feedservice.urlstore',
'feedservice.webservice',
)
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
BASE_URL='http://localhost:8080/'
import dj_database_url
DATABASES = {'default': dj_database_url.config()}
SOUNDCLOUD_CONSUMER_KEY = os.getenv('MYGPOFS_SOUNDCLOUD_CONSUMER_KEY', '')
FLATTR_THING = ''
ALLOWED_HOSTS = filter(None, os.getenv('MYGPOFS_ALLOWED_HOSTS', '').split(';'))
try:
from settings_prod import *
except ImportError, e:
import sys
print >> sys.stderr, 'create settings_prod.py with your customized settings'
| Python | 0.000002 |
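
The `LOGGING` dict added by the commit above uses Django's dictConfig format. Outside of Django the same structure is applied with `logging.config.dictConfig`; a runnable standalone illustration (handler and logger names invented for the demo):

```python
import logging
import logging.config

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {'class': 'logging.StreamHandler'},
    },
    'loggers': {
        'demo': {'handlers': ['console'], 'level': 'ERROR'},
    },
}
logging.config.dictConfig(LOGGING)
logging.getLogger('demo').error('only ERROR and above reaches the console')
```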
576ea74646935c00e051a46244b8f56165710df0 | Add multicast send | circuits/node/server.py | circuits/node/server.py | # Module: server
# Date: ...
# Author: ...
"""Server
...
"""
from circuits.net.sockets import TCPServer
from circuits import handler, BaseComponent
from .protocol import Protocol
class Server(BaseComponent):
"""Server
...
"""
channel = 'node'
__protocol = {}
def __init__(self, bind, channel=channel, **kwargs):
super(Server, self).__init__(channel=channel, **kwargs)
self.server = TCPServer(bind, channel=self.channel, **kwargs)
self.server.register(self)
self.__receive_event_firewall = kwargs.get(
'receive_event_firewall',
None
)
self.__send_event_firewall = kwargs.get(
'send_event_firewall',
None
)
def send(self, event, sock):
return self.__protocol[sock].send(event)
def send_to(self, event, socks):
for sock in socks:
self.send(event, sock)
def send_all(self, event):
for sock in self.__protocol:
self.__protocol[sock].send(event)
@handler('read')
def _on_read(self, sock, data):
self.__protocol[sock].add_buffer(data)
@property
def host(self):
if hasattr(self, 'server'):
return self.server.host
@property
def port(self):
if hasattr(self, 'server'):
return self.server.port
@handler('connect')
def __connect_peer(self, sock, host, port):
self.__protocol[sock] = Protocol(
sock=sock,
server=self.server,
receive_event_firewall=self.__receive_event_firewall,
send_event_firewall=self.__send_event_firewall
).register(self)
@handler('disconnect')
def __disconnect_peer(self, sock):
for s in self.__protocol.copy():
try:
s.getpeername()
except:
del(self.__protocol[s])
| # Module: server
# Date: ...
# Author: ...
"""Server
...
"""
from circuits.net.sockets import TCPServer
from circuits import handler, BaseComponent
from .protocol import Protocol
class Server(BaseComponent):
"""Server
...
"""
channel = 'node'
__protocol = {}
def __init__(self, bind, channel=channel, **kwargs):
super(Server, self).__init__(channel=channel, **kwargs)
self.server = TCPServer(bind, channel=self.channel, **kwargs)
self.server.register(self)
self.__receive_event_firewall = kwargs.get(
'receive_event_firewall',
None
)
self.__send_event_firewall = kwargs.get(
'send_event_firewall',
None
)
def send(self, event, sock):
return self.__protocol[sock].send(event)
def send_all(self, event):
for sock in self.__protocol:
self.__protocol[sock].send(event)
@handler('read')
def _on_read(self, sock, data):
self.__protocol[sock].add_buffer(data)
@property
def host(self):
if hasattr(self, 'server'):
return self.server.host
@property
def port(self):
if hasattr(self, 'server'):
return self.server.port
@handler('connect')
def __connect_peer(self, sock, host, port):
self.__protocol[sock] = Protocol(
sock=sock,
server=self.server,
receive_event_firewall=self.__receive_event_firewall,
send_event_firewall=self.__send_event_firewall
).register(self)
@handler('disconnect')
def __disconnect_peer(self, sock):
for s in self.__protocol.copy():
try:
s.getpeername()
except:
del(self.__protocol[s])
| Python | 0.000001 |

8c68031928f54d38c92308504bc93bf61ead57f5 | Update clashcallerbot_reply.py: added get list of messages older than current datetime, updated outline | clashcallerbot_reply.py | clashcallerbot_reply.py | #! python3
# -*- coding: utf-8 -*-
"""Checks messages in database and sends PM if expiration time passed.
This module checks messages saved in a MySQL-compatible database and sends a reminder
via PM if the expiration time has passed. If so, the message is removed from the
database.
"""
import praw
import praw.exceptions
import logging.config
import datetime
import clashcallerbot_database as db
# Logger
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
logging.raiseExceptions = True # Production mode if False (no console sys.stderr output)
logger = logging.getLogger('reply')
# Generate reddit instance
reddit = praw.Reddit('clashcallerreply') # Section name in praw.ini
subreddit = reddit.subreddit('ClashCallerBot') # Limit scope for testing purposes
def main():
while True:
# Get list of messages older than current datetime
now = datetime.datetime.now(datetime.timezone.utc)
messages = db.get_messages(now)
if not messages:
continue
# TODO: Send reminder PM
# TODO: Delete message from database
# If run directly, instead of imported as a module, run main():
if __name__ == '__main__':
main()
| #! python3
# -*- coding: utf-8 -*-
"""Checks messages in database and sends PM if expiration time passed.
This module checks messages saved in a MySQL-compatible database and sends a reminder
via PM if the expiration time has passed. If so, the message is removed from the
database.
"""
import praw
import praw.exceptions
import logging.config
import datetime
import clashcallerbot_database as db
# Logger
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
logging.raiseExceptions = True # Production mode if False (no console sys.stderr output)
logger = logging.getLogger('reply')
# Generate reddit instance
reddit = praw.Reddit('clashcallerreply') # Section name in praw.ini
subreddit = reddit.subreddit('ClashCallerBot') # Limit scope for testing purposes
def main():
while True:
# TODO: Get list of messages ordered by expiration date (in MySQL)
# TODO: Compare each message expiration datetime to current datetime (in MySQL?)
# TODO: If current datetime is after expiration datetime, send PM
# TODO: Delete message from database
pass
# If run directly, instead of imported as a module, run main():
if __name__ == '__main__':
main()
| Python | 0 |
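
The loop added above compares stored expirations against a timezone-aware "now". A sketch of the expiry check driving that loop, with the database layer faked as a list (clashcallerbot's real helper lives in its database module):

```python
import datetime

messages = [
    {'id': 1, 'expires': datetime.datetime(2018, 1, 1, tzinfo=datetime.timezone.utc)},
    {'id': 2, 'expires': datetime.datetime(2099, 1, 1, tzinfo=datetime.timezone.utc)},
]

def get_messages(now):
    # Aware datetimes compare correctly across zones; mixing in a naive
    # datetime would raise a TypeError instead.
    return [m for m in messages if m['expires'] <= now]

now = datetime.datetime.now(datetime.timezone.utc)
print([m['id'] for m in get_messages(now)])  # -> [1]
```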
f7f16611754181c28b1c8c6a3e5942731f851c46 | add some docstring | fileparser/extractor.py | fileparser/extractor.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
r"""
# .---. .-----------
# / \ __ / ------
# / / \( )/ ----- (`-') _ _(`-') <-. (`-')_
# ////// '\/ ` --- ( OO).-/( (OO ).-> .-> \( OO) ) .->
# //// / // : : --- (,------. \ .'_ (`-')----. ,--./ ,--/ ,--.' ,-.
# // / / / `\/ '-- | .---' '`'-..__)( OO).-. ' | \ | | (`-')'.' /
# // //..\\ (| '--. | | ' |( _) | | | | . '| |)(OO \ /
# ============UU====UU==== | .--' | | / : \| |)| | | |\ | | / /)
# '//||\\` | `---. | '-' / ' '-' ' | | \ | `-/ /`
# ''`` `------' `------' `-----' `--' `--' `--'
# ######################################################################################
#
# Author: edony - edonyzpc@gmail.com
#
# twitter : @edonyzpc
#
# Last modified: 2015-05-28 22:20
#
# Filename: extractor.py
#
# Description: All Rights Are Reserved
#
# ******
# Extract the specific content from text with the given keys.
"""
import os
import re
class Extractor(object):
"""
Extract the specific content with keys.
"""
def __init__(self, keys, extracted_file, output_file=None, flag=None):
if type(keys) is list:
self.keys = keys
elif type(keys) is str:
self.keys =[keys]
else:
raise ValueError("Wrong Key type")
if output_file:
self.output_file = output_file
else:
self.output_file = 'EXTRACT'
if flag is None:
self.flag = [0, 0] #flag are controlling add the keys into write file
else:
self.flag = flag
self.pattern = Extractor.re_pattern(self.keys)
self.extracted_file = extracted_file
@staticmethod
def re_pattern(keys):
if len(keys) > 2:
raise ValueError("The keys are too much, simplify them less than 2.\n")
regular_expression = keys[0] + '(?P<con>.*)'
if len(keys) == 2:
regular_expression += keys[1]
return re.compile(regular_expression)
def parser(self):
"""
Extract the content between keys(if has).
"""
with open(self.output_file, 'w') as out:
with open(self.extracted_file) as exfile:
for line in exfile.readlines():
g = self.pattern.search(line)
if g:
if self.flag[0]:
out.write(self.keys[0])
out.write(g.group('con'))
if self.flag[1]:
out.write(self.keys[1])
out.write('\n')
print('Finish Extract')
if __name__ == '__main__':
tmp = Extractor('http:', 'career_old', flag=[1,0])
tmp.parser()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
r"""
# .---. .-----------
# / \ __ / ------
# / / \( )/ ----- (`-') _ _(`-') <-. (`-')_
# ////// '\/ ` --- ( OO).-/( (OO ).-> .-> \( OO) ) .->
# //// / // : : --- (,------. \ .'_ (`-')----. ,--./ ,--/ ,--.' ,-.
# // / / / `\/ '-- | .---' '`'-..__)( OO).-. ' | \ | | (`-')'.' /
# // //..\\ (| '--. | | ' |( _) | | | | . '| |)(OO \ /
# ============UU====UU==== | .--' | | / : \| |)| | | |\ | | / /)
# '//||\\` | `---. | '-' / ' '-' ' | | \ | `-/ /`
# ''`` `------' `------' `-----' `--' `--' `--'
# ######################################################################################
#
# Author: edony - edonyzpc@gmail.com
#
# twitter : @edonyzpc
#
# Last modified: 2015-05-28 22:20
#
# Filename: extractor.py
#
# Description: All Rights Are Reserved
#
# ******
# Extract the specific content from text with the given keys.
"""
import os
import re
class Extractor(object):
"""
Extract the specific content with keys.
"""
def __init__(self, keys, extracted_file, output_file=None, flag=None):
if type(keys) is list:
self.keys = keys
elif type(keys) is str:
self.keys =[keys]
else:
raise ValueError("Wrong Key type")
if output_file:
self.output_file = output_file
else:
self.output_file = 'EXTRACT'
if flag is None:
self.flag = [0, 0] #flag are controlling add the keys into write file
else:
self.flag = flag
self.pattern = Extractor.re_pattern(self.keys)
self.extracted_file = extracted_file
@staticmethod
def re_pattern(keys):
if len(keys) > 2:
raise ValueError("The keys are too much, simplify them less than 2.\n")
regular_expression = keys[0] + '(?P<con>.*)'
if len(keys) == 2:
regular_expression += keys[1]
return re.compile(regular_expression)
def parser(self):
with open(self.output_file, 'w') as out:
with open(self.extracted_file) as exfile:
for line in exfile.readlines():
g = self.pattern.search(line)
if g:
if self.flag[0]:
out.write(self.keys[0])
out.write(g.group('con'))
if self.flag[1]:
out.write(self.keys[1])
out.write('\n')
print('Finish Extract')
if __name__ == '__main__':
tmp = Extractor('http:', 'career_old', flag=[1,0])
tmp.parser()
| Python | 0.000041 |
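
`re_pattern` in the record above builds `key1(?P<con>.*)key2` with a greedy `.*`, which captures up to the *last* occurrence of the closing key on a line; a non-greedy `.*?` stops at the first. A quick demonstration of the difference:

```python
import re

line = 'start middle end trailing end'
greedy = re.compile(r'start(?P<con>.*)end')
lazy = re.compile(r'start(?P<con>.*?)end')

print(greedy.search(line).group('con'))  # ' middle end trailing '
print(lazy.search(line).group('con'))    # ' middle '
```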
42923355855a53cd7d6df23f666c2c74a07c7068 | fix healpix_helper to take care of nans | lib/healpix_helper.py | lib/healpix_helper.py | import pyfits
import numpy as np
try:
import pywcs
except ImportError:
import astropy.pywcs as pywcs
import healpy
import warnings
class HealpixData(object):
def __init__(self, nside, data, coord=None, nested=False, flipy=False):
self._nside = nside
self._data = data
self._nested = nested
self._flipy = flipy
self._coord = coord
def get_projected_map(self, header):
map_shape = (header["naxis2"], header["naxis1"])
iy, ix = np.indices(map_shape)
wcs = pywcs.WCS(header)
phi, theta = wcs.wcs_pix2sky(ix, iy, 0)
if self._coord is not None:
from pywcsgrid2.wcs_helper import coord_system_guess, sky2sky
map_coord = coord_system_guess(header["ctype1"],
header["ctype2"],
equinox=header["equinox"])
if (map_coord is not None) and (map_coord != self._coord):
warnings.warn(" doing the conversion " + map_coord)
phi, theta = sky2sky(map_coord, self._coord)(phi, theta)
if self._flipy:
theta -= 90
theta *= -np.pi/180.
else:
theta += 90
theta *= np.pi/180.
phi *= np.pi/180
if self._nested:
ang2pix = healpy._healpy_pixel_lib._ang2pix_nest
else:
ang2pix = healpy._healpy_pixel_lib._ang2pix_ring
        # Some values could be NaNs. Mask those out before calling
        # ang2pix and recover them afterwards.
        mask = np.isfinite(theta) & np.isfinite(phi)
ipix = ang2pix(self._nside, theta[mask], phi[mask])
map_data_ = self._data[ipix]
map_data = np.empty(map_shape, dtype=map_data_.dtype)
map_data.fill(np.nan)
map_data.flat[mask] = map_data_
return map_data
if __name__ == '__main__':
fname = "LAB_fullvel.fits"
f = pyfits.open(fname)
header = f[1].header
ordering = header["ordering"]
nside = header["nside"]
data = f[1].data["temperature"]
healpix_data = HealpixData(nside, data.flat, nested=False)
fits_name = "lambda_mollweide_halpha_fwhm06_0512.fits"
f2 = pyfits.open(fits_name)
d = healpix_data.get_projected_map(f2[1].header)
#data2 = f2[1].data
#header2 = f2[1].header
| import pyfits
import numpy as np
try:
import pywcs
except ImportError:
import astropy.pywcs as pywcs
import healpy
import warnings
class HealpixData(object):
def __init__(self, nside, data, coord=None, nested=False, flipy=False):
self._nside = nside
self._data = data
self._nested = nested
self._flipy = flipy
self._coord = coord
def get_projected_map(self, header):
map_shape = (header["naxis2"], header["naxis1"])
iy, ix = np.indices(map_shape)
wcs = pywcs.WCS(header)
phi, theta = wcs.wcs_pix2sky(ix, iy, 0)
if self._coord is not None:
from pywcsgrid2.wcs_helper import coord_system_guess, sky2sky
map_coord = coord_system_guess(header["ctype1"],
header["ctype2"],
equinox=header["equinox"])
if (map_coord is not None) and (map_coord != self._coord):
warnings.warn(" doing the conversion " + map_coord)
phi, theta = sky2sky(map_coord, self._coord)(phi, theta)
if self._flipy:
theta -= 90
theta *= -np.pi/180.
else:
theta += 90
theta *= np.pi/180.
phi *= np.pi/180
if self._nested:
ang2pix = healpy._healpy_pixel_lib._ang2pix_nest
else:
ang2pix = healpy._healpy_pixel_lib._ang2pix_ring
ipix = ang2pix(self._nside, theta, phi)
map_data = self._data[ipix].reshape(map_shape)
return map_data
if __name__ == '__main__':
fname = "LAB_fullvel.fits"
f = pyfits.open(fname)
header = f[1].header
ordering = header["ordering"]
nside = header["nside"]
data = f[1].data["temperature"]
healpix_data = HealpixData(nside, data.flat, nested=False)
fits_name = "lambda_mollweide_halpha_fwhm06_0512.fits"
f2 = pyfits.open(fits_name)
d = healpix_data.get_projected_map(f2[1].header)
#data2 = f2[1].data
#header2 = f2[1].header
| Python | 0 |
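
The fix above computes `ang2pix` only on finite inputs and scatters the results back into a NaN-filled output. The generic form of that NaN-safe pattern in NumPy, with a trivial stand-in for the pixel lookup:

```python
import numpy as np

theta = np.array([0.1, np.nan, 0.3])
phi = np.array([1.0, 2.0, np.nan])

# Keep only positions where *both* inputs are finite.
mask = np.isfinite(theta) & np.isfinite(phi)
out = np.full(theta.shape, np.nan)     # NaN wherever no result is computed
out[mask] = theta[mask] + phi[mask]    # stand-in for the ang2pix lookup
print(out)  # [1.1 nan nan]
```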
2cbae4650422f7982ef50e564e9e27e7fd294be8 | Add ability to nix product to admin | fjord/feedback/admin.py | fjord/feedback/admin.py | from django.contrib import admin
from django.core.exceptions import PermissionDenied
from fjord.feedback.models import Product, Response
class ProductAdmin(admin.ModelAdmin):
list_display = (
'id',
'enabled',
'on_dashboard',
'display_name',
'db_name',
'translation_system',
'notes',
'slug')
list_filter = ('enabled', 'on_dashboard')
class EmptyFriendlyAVFLF(admin.AllValuesFieldListFilter):
def choices(self, cl):
"""Displays empty string as <Empty>
This makes it possible to choose Empty in the filter
list. Otherwise empty strings display as '' and don't get any
height and thus aren't selectable.
"""
for choice in super(EmptyFriendlyAVFLF, self).choices(cl):
if choice.get('display') == '':
choice['display'] = '<Empty>'
yield choice
class ResponseFeedbackAdmin(admin.ModelAdmin):
list_display = ('created', 'product', 'channel', 'version', 'happy',
'description', 'user_agent', 'locale')
list_filter = ('happy', ('product', EmptyFriendlyAVFLF),
('locale', EmptyFriendlyAVFLF))
search_fields = ('description',)
actions = ['nix_product']
list_per_page = 200
def nix_product(self, request, queryset):
ret = queryset.update(product=u'')
self.message_user(request, '%s responses updated.' % ret)
nix_product.short_description = u'Remove product for selected responses'
def queryset(self, request):
# Note: This ignores the super() queryset and uses the
# uncached manager.
return Response.uncached.all()
def has_add_permission(self, request, obj=None):
# Prevent anyone from adding feedback in the admin.
return False
def change_view(self, request, *args, **kwargs):
# We don't want anyone (including superusers) to change
# feedback. It's either keep it or delete it.
#
# That's sort of difficult with Django without writing a bunch
# of stuff, so I'm lazily preventing POST here.
#
# TODO: Make this better, but push off any changes until other
# non-superuser people have access to this view and it becomes
# a relevant issue.
if request.method == 'POST':
raise PermissionDenied()
return super(ResponseFeedbackAdmin, self).change_view(
request, *args, **kwargs)
admin.site.register(Product, ProductAdmin)
admin.site.register(Response, ResponseFeedbackAdmin)
| from django.contrib import admin
from django.core.exceptions import PermissionDenied
from fjord.feedback.models import Product, Response
class ProductAdmin(admin.ModelAdmin):
list_display = (
'id',
'enabled',
'on_dashboard',
'display_name',
'db_name',
'translation_system',
'notes',
'slug')
list_filter = ('enabled', 'on_dashboard')
class EmptyFriendlyAVFLF(admin.AllValuesFieldListFilter):
def choices(self, cl):
"""Displays empty string as <Empty>
This makes it possible to choose Empty in the filter
list. Otherwise empty strings display as '' and don't get any
height and thus aren't selectable.
"""
for choice in super(EmptyFriendlyAVFLF, self).choices(cl):
if choice.get('display') == '':
choice['display'] = '<Empty>'
yield choice
class ResponseFeedbackAdmin(admin.ModelAdmin):
list_display = ('created', 'product', 'channel', 'version', 'happy',
'description', 'user_agent', 'locale')
list_filter = ('happy', ('product', EmptyFriendlyAVFLF),
('locale', EmptyFriendlyAVFLF))
search_fields = ('description',)
def queryset(self, request):
# Note: This ignores the super() queryset and uses the
# uncached manager.
return Response.uncached.all()
def has_add_permission(self, request, obj=None):
# Prevent anyone from adding feedback in the admin.
return False
def change_view(self, request, *args, **kwargs):
# We don't want anyone (including superusers) to change
# feedback. It's either keep it or delete it.
#
# That's sort of difficult with Django without writing a bunch
# of stuff, so I'm lazily preventing POST here.
#
# TODO: Make this better, but push off any changes until other
# non-superuser people have access to this view and it becomes
# a relevant issue.
if request.method == 'POST':
raise PermissionDenied()
return super(ResponseFeedbackAdmin, self).change_view(
request, *args, **kwargs)
admin.site.register(Product, ProductAdmin)
admin.site.register(Response, ResponseFeedbackAdmin)
| Python | 0 |
e19e2d69baabac3adedfae4e7a8c6ef5bb3d6f53 | Fix alembic script | alembic/versions/4e0500347ce7_add_multigame_tables.py | alembic/versions/4e0500347ce7_add_multigame_tables.py | """add multigame tables
Revision ID: 4e0500347ce7
Revises: 29344aa34d9
Create Date: 2016-04-05 23:51:58.647657
"""
# revision identifiers, used by Alembic.
revision = '4e0500347ce7'
down_revision = '29344aa34d9'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('gameversion', sa.Column('game_id', sa.Integer(), nullable=True))
op.create_foreign_key('gameversion_game_id_fkey', 'gameversion', 'game', ['game_id'], ['id'])
op.add_column('mod', sa.Column('game_id', sa.Integer(), nullable=True))
op.create_foreign_key('mod_game_id_fkey', 'mod', 'game', ['game_id'], ['id'])
op.add_column('modlist', sa.Column('game_id', sa.Integer(), nullable=True))
op.create_foreign_key('modlist_game_id_fkey', 'modlist', 'game', ['game_id'], ['id'])
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_constraint('modlist_game_id_fkey', 'modlist', type_='foreignkey')
op.drop_column('modlist', 'game_id')
op.drop_constraint('mod_game_id_fkey', 'mod', type_='foreignkey')
op.drop_column('mod', 'game_id')
op.drop_constraint('gameversion_game_id_fkey', 'gameversion', type_='foreignkey')
op.drop_column('gameversion', 'game_id')
### end Alembic commands ###
| """add multigame tables
Revision ID: 4e0500347ce7
Revises: 29344aa34d9
Create Date: 2016-03-30 12:26:36.632566
"""
# revision identifiers, used by Alembic.
revision = '4e0500347ce7'
down_revision = '29344aa34d9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'publisher',
sa.Column('id', sa.Integer, primary_key=True,autoincrement=True),
sa.Column('name', sa.String(50), nullable=False),
sa.Column('description', sa.Unicode(200)),
)
op.create_table(
'game',
sa.Column('id', sa.Integer, primary_key=True,autoincrement=True),
sa.Column('name', sa.String(50), nullable=False),
sa.Column('description', sa.Unicode(200)),
sa.Column('theme', sa.Integer, nullable=True),
sa.Column('publisher', sa.Integer, nullable=False),
)
op.create_table(
'theme',
sa.Column('id', sa.Integer, primary_key=True,autoincrement=True),
sa.Column('name', sa.String(50), nullable=False),
sa.Column('css', sa.String(50), nullable=False),
sa.Column('description', sa.Unicode(200)),
)
op.add_column('gameversion', sa.Column('game', sa.Integer, nullable=False))
op.add_column('mod', sa.Column('game', sa.Integer, nullable=False))
def downgrade():
op.drop_table('publisher')
op.drop_table('game')
op.drop_table('theme')
op.drop_column('gameversion', 'game')
op.drop_column('mod', 'game')
| Python | 0.000033 |
10f8deb343d17e73185bef916396a80c73b718ed | Add link to migration guide (#10821) | conans/pylint_plugin.py | conans/pylint_plugin.py | """Pylint plugin for ConanFile"""
import re
import astroid
from astroid import MANAGER
from pylint.checkers import BaseChecker
from pylint.interfaces import IRawChecker
def register(linter):
"""required method to auto register this checker"""
linter.register_checker(ConanDeprecatedImportsChecker(linter))
def transform_conanfile(node):
"""Transform definition of ConanFile class so dynamic fields are visible to pylint"""
str_class = astroid.builtin_lookup("str")
info_class = MANAGER.ast_from_module_name("conans.model.info").lookup(
"ConanInfo")
build_requires_class = MANAGER.ast_from_module_name(
"conans.client.graph.graph_manager").lookup("_RecipeBuildRequires")
file_copier_class = MANAGER.ast_from_module_name(
"conans.client.file_copier").lookup("FileCopier")
file_importer_class = MANAGER.ast_from_module_name(
"conans.client.importer").lookup("_FileImporter")
python_requires_class = MANAGER.ast_from_module_name(
"conans.client.graph.python_requires").lookup("PyRequires")
dynamic_fields = {
"conan_data": str_class,
"build_requires": build_requires_class,
"info_build": info_class,
"info": info_class,
"copy": file_copier_class,
"copy_deps": file_importer_class,
"python_requires": [str_class, python_requires_class],
"recipe_folder": str_class,
}
for f, t in dynamic_fields.items():
node.locals[f] = [t]
MANAGER.register_transform(
astroid.ClassDef, transform_conanfile,
lambda node: node.qname() == "conans.model.conan_file.ConanFile")
def _python_requires_member():
return astroid.parse("""
from conans.client.graph.python_requires import ConanPythonRequire
python_requires = ConanPythonRequire()
""")
astroid.register_module_extender(astroid.MANAGER, "conans", _python_requires_member)
class ConanDeprecatedImportsChecker(BaseChecker):
"""
Check "from conans*" imports which disappears in Conan 2.x. Only "from conan*" is valid
"""
__implements__ = IRawChecker
deprecated_imports_pattern = re.compile(r"(from|import)\s+conans[\.|\s].*")
name = "conan_deprecated_imports"
msgs = {
"E9000": (
"Using deprecated imports from 'conans'. Check migration guide at https://docs.conan.io/en/latest/conan_v2.html",
"conan1.x-deprecated-imports",
(
"Use imports from 'conan' instead of 'conans'"
" because 'conan' will be the root package for Conan 2.x"
)
)
}
options = ()
def process_module(self, node):
"""
Processing the module's content that is accessible via node.stream() function
"""
with node.stream() as stream:
for (index, line) in enumerate(stream):
if self.deprecated_imports_pattern.match(line.decode('utf-8')):
self.add_message("conan1.x-deprecated-imports", line=index + 1)
| """Pylint plugin for ConanFile"""
import re
import astroid
from astroid import MANAGER
from pylint.checkers import BaseChecker
from pylint.interfaces import IRawChecker
def register(linter):
"""required method to auto register this checker"""
linter.register_checker(ConanDeprecatedImportsChecker(linter))
def transform_conanfile(node):
"""Transform definition of ConanFile class so dynamic fields are visible to pylint"""
str_class = astroid.builtin_lookup("str")
info_class = MANAGER.ast_from_module_name("conans.model.info").lookup(
"ConanInfo")
build_requires_class = MANAGER.ast_from_module_name(
"conans.client.graph.graph_manager").lookup("_RecipeBuildRequires")
file_copier_class = MANAGER.ast_from_module_name(
"conans.client.file_copier").lookup("FileCopier")
file_importer_class = MANAGER.ast_from_module_name(
"conans.client.importer").lookup("_FileImporter")
python_requires_class = MANAGER.ast_from_module_name(
"conans.client.graph.python_requires").lookup("PyRequires")
dynamic_fields = {
"conan_data": str_class,
"build_requires": build_requires_class,
"info_build": info_class,
"info": info_class,
"copy": file_copier_class,
"copy_deps": file_importer_class,
"python_requires": [str_class, python_requires_class],
"recipe_folder": str_class,
}
for f, t in dynamic_fields.items():
node.locals[f] = [t]
MANAGER.register_transform(
astroid.ClassDef, transform_conanfile,
lambda node: node.qname() == "conans.model.conan_file.ConanFile")
def _python_requires_member():
return astroid.parse("""
from conans.client.graph.python_requires import ConanPythonRequire
python_requires = ConanPythonRequire()
""")
astroid.register_module_extender(astroid.MANAGER, "conans", _python_requires_member)
class ConanDeprecatedImportsChecker(BaseChecker):
"""
Check "from conans*" imports which disappears in Conan 2.x. Only "from conan*" is valid
"""
__implements__ = IRawChecker
deprecated_imports_pattern = re.compile(r"(from|import)\s+conans[\.|\s].*")
name = "conan_deprecated_imports"
msgs = {
"E9000": (
"Using deprecated imports from 'conans'",
"conan1.x-deprecated-imports",
(
"Use imports from 'conan' instead of 'conans'"
" because 'conan' will be the root package for Conan 2.x"
)
)
}
options = ()
def process_module(self, node):
"""
Processing the module's content that is accessible via node.stream() function
"""
with node.stream() as stream:
for (index, line) in enumerate(stream):
if self.deprecated_imports_pattern.match(line.decode('utf-8')):
self.add_message("conan1.x-deprecated-imports", line=index + 1)
| Python | 0 |
6fb6fdee06410d1d051134f0b9dcb47ad2ac1885 | Simplify code around re-raising an error. | azurectl/setup_account_task.py | azurectl/setup_account_task.py | # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
usage: azurectl setup account -h | --help
azurectl setup account list
azurectl setup account remove --name=<configname>
azurectl setup account add --name=<configname> --publish-settings-file=<file> --storage-account-name=<storagename> --container-name=<containername>
[--subscription-id=<subscriptionid>]
azurectl setup account help
commands:
list
list configured account sections
remove
remove specified account section
add
add a new account section to the config file
help
show manual page for config command
options:
--name=<configname>
section name to identify this account
--publish-settings-file=<file>
path to the Microsoft Azure account publish settings file
--storage-account-name=<storagename>
storage account name to use by default
--container-name=<containername>
container name for storage account to use by default
--subscription-id=<subscriptionid>
subscription id, if more than one subscription is included in your
publish settings file.
"""
import sys
# project
from cli_task import CliTask
from logger import log
from help import Help
from account_setup import AccountSetup
from data_collector import DataCollector
from data_output import DataOutput
from azurectl_exceptions import AzureAccountLoadFailed
from config_file_path import ConfigFilePath
class SetupAccountTask(CliTask):
"""
Process setup config commands
"""
def __init__(self, load_config=True):
"""
Override CliTask's init, gracefully handle the case
where config file does not exist, so a new one may be added.
"""
try:
CliTask.__init__(self, load_config)
self.config_file = self.config.config_file
except AzureAccountLoadFailed:
if self.command_args['add']:
self.config_file = ConfigFilePath().default_new_config()
else:
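                # a bare "raise" re-raises the active exception with its
                # original traceback (the Python 3 safe form of "raise t, v, tb")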
raise
def process(self):
self.manual = Help()
if self.__help():
return
self.setup = AccountSetup(self.config_file)
self.result = DataCollector()
self.out = DataOutput(
self.result,
self.global_args['--output-format'],
self.global_args['--output-style']
)
if self.command_args['list']:
self.__list()
elif self.command_args['remove']:
self.__remove()
elif self.command_args['add']:
self.__add()
def __help(self):
if self.command_args['help']:
self.manual.show('azurectl::setup::account')
else:
return False
return self.manual
def __add(self):
if self.setup.add(
self.command_args['--name'],
self.command_args['--publish-settings-file'],
self.command_args['--storage-account-name'],
self.command_args['--container-name'],
self.command_args['--subscription-id']
):
log.info('Added Account %s', self.command_args['--name'])
def __list(self):
account_info = self.setup.list()
if not account_info:
log.info('There are no accounts configured')
else:
self.result.add('accounts', account_info)
self.out.display()
def __remove(self):
if self.setup.remove(
self.command_args['--name']
):
log.info('Removed Account %s', self.command_args['--name'])
| # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
usage: azurectl setup account -h | --help
azurectl setup account list
azurectl setup account remove --name=<configname>
azurectl setup account add --name=<configname> --publish-settings-file=<file> --storage-account-name=<storagename> --container-name=<containername>
[--subscription-id=<subscriptionid>]
azurectl setup account help
commands:
list
list configured account sections
remove
remove specified account section
add
add a new account section to the config file
help
show manual page for config command
options:
--name=<configname>
section name to identify this account
--publish-settings-file=<file>
path to the Microsoft Azure account publish settings file
--storage-account-name=<storagename>
storage account name to use by default
--container-name=<containername>
container name for storage account to use by default
--subscription-id=<subscriptionid>
subscription id, if more than one subscription is included in your
publish settings file.
"""
import sys
# project
from cli_task import CliTask
from logger import log
from help import Help
from account_setup import AccountSetup
from data_collector import DataCollector
from data_output import DataOutput
from azurectl_exceptions import AzureAccountLoadFailed
from config_file_path import ConfigFilePath
class SetupAccountTask(CliTask):
"""
Process setup config commands
"""
def __init__(self, load_config=True):
"""
Override CliTask's init, gracefully handle the case
where config file does not exist, so a new one may be added.
"""
try:
CliTask.__init__(self, load_config)
self.config_file = self.config.config_file
except AzureAccountLoadFailed:
t, v, tb = sys.exc_info()
if self.command_args['add']:
self.config_file = ConfigFilePath().default_new_config()
else:
raise t, v, tb
def process(self):
self.manual = Help()
if self.__help():
return
self.setup = AccountSetup(self.config_file)
self.result = DataCollector()
self.out = DataOutput(
self.result,
self.global_args['--output-format'],
self.global_args['--output-style']
)
if self.command_args['list']:
self.__list()
elif self.command_args['remove']:
self.__remove()
elif self.command_args['add']:
self.__add()
def __help(self):
if self.command_args['help']:
self.manual.show('azurectl::setup::account')
else:
return False
return self.manual
def __add(self):
if self.setup.add(
self.command_args['--name'],
self.command_args['--publish-settings-file'],
self.command_args['--storage-account-name'],
self.command_args['--container-name'],
self.command_args['--subscription-id']
):
log.info('Added Account %s', self.command_args['--name'])
def __list(self):
account_info = self.setup.list()
if not account_info:
log.info('There are no accounts configured')
else:
self.result.add('accounts', account_info)
self.out.display()
def __remove(self):
if self.setup.remove(
self.command_args['--name']
):
log.info('Removed Account %s', self.command_args['--name'])
| Python | 0.999998 |
e642a82c4ed1e146fe64f97e3694355310abbcf7 | Add load and save methods | dem.py | dem.py | """ Classes for loading digital elevation models as numeric grids """
import os, sys
import numpy as np
from osgeo import gdal, gdalconst
import osr
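# Constants referenced by BaseSpatialGrid.save() below; these concrete values
# are illustrative assumptions, not part of the original module.
GDAL_DRIVER_NAME = 'GTiff'
GDAL_DTYPE = gdalconst.GDT_Float32
EPSG_CODE = 4326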
class CalculationMixin(object):
    def _calculate_slope(self):
PAD_DX = 2
PAD_DY = 2
z_pad = self._pad_boundary(PAD_DX, PAD_DY)
slope_x = (z_pad[1:-1, 2:] - z_pad[1:-1, :-2])/(2*self.dx)
        slope_y = (z_pad[2:, 1:-1] - z_pad[:-2, 1:-1])/(2*self.dy)
return slope_x, slope_y
def _calculate_laplacian(self):
return self._calculate_directional_laplacian(0)
def _calculate_directional_laplacian(self, alpha):
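        # directional second derivative along a direction rotated alpha radians
        # from the x-axis: zxx*cos^2(a) - 2*zxy*sin(a)*cos(a) + zyy*sin^2(a)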
        dz_dx = np.diff(self._griddata, 1, 1)/self.dx
        # trim each term to the common (ny-2, nx-2) interior so they can be summed
        d2z_dxdy = np.diff(dz_dx, 1, 0)[:-1, :-1]/self.dy
        d2z_dx2 = np.diff(self._griddata, 2, 1)[1:-1, :]/self.dx**2
        d2z_dy2 = np.diff(self._griddata, 2, 0)[:, 1:-1]/self.dy**2
del2z = d2z_dx2*np.cos(alpha)**2 - 2*d2z_dxdy*np.sin(alpha)*np.cos(alpha) + d2z_dy2*np.sin(alpha)**2
return del2z
def _pad_boundary(self, dx, dy):
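        # zero-pad the grid with dx/2 columns and dy/2 rows on every side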
pad_x = np.zeros((self.ny, np.round(dx/2)))
self._griddata = np.hstack([pad_x, self._griddata, pad_x])
        self.nx += 2*np.round(dx/2)
        self.ny += 2*np.round(dy/2)
        pad_y = np.zeros((np.round(dy/2), self.nx))
        self._griddata = np.vstack([pad_y, self._griddata, pad_y])
        return self._griddata
class GDALMixin(object):
pass
class GeorefInfo(object):
pass
#class GeographicMixin(object):
# pass
class BaseSpatialGrid(GDALMixin):
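    # default in-memory element type; an assumption added so that cls.dtype
    # used below resolves -- subclasses may override
    dtype = np.float64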
def save(self, filename):
ncols = self._georef_info.nx
nrows = self._georef_info.ny
dx = self._georef_info.dx
if self._georef_info.dy is not None:
dy = self._georef_info.dy
else:
dy = dx
x_origin = self._georef_info.xllcenter
y_origin = self._georef_info.yllcenter
driver = gdal.GetDriverByName(GDAL_DRIVER_NAME)
        out_raster = driver.Create(filename, ncols, nrows, 1, GDAL_DTYPE)
        out_raster.SetGeoTransform((x_origin, dx, 0, y_origin, 0, -dy))
out_band = out_raster.GetRasterBand(1)
out_band.WriteArray(self._griddata)
srs = osr.SpatialReference()
srs.ImportFromEPSG(EPSG_CODE)
out_raster.SetProjection(srs.ExportToWkt())
out_band.FlushCache()
@classmethod
def load(cls, filename):
return_object = cls()
gdal_dataset = gdal.Open(filename)
band = gdal_dataset.GetRasterBand(1)
        nodata = band.GetNoDataValue()
        return_object._griddata = band.ReadAsArray().astype(cls.dtype)
if nodata is not None:
nodata_index = np.where(return_object._griddata == nodata)
if cls.dtype is not np.uint8:
return_object._griddata[nodata_index] = np.NAN
geo_transform = gdal_dataset.GetGeoTransform()
nx = gdal_dataset.RasterXSize
ny = gdal_dataset.RasterYSize
return_object._georef_info.geo_transform = geo_transform
        return_object._georef_info.dx = return_object._georef_info.geo_transform[1]
        return_object._georef_info.dy = -return_object._georef_info.geo_transform[5]
        return_object._georef_info.xllcenter = return_object._georef_info.geo_transform[0] + return_object._georef_info.dx
        return_object._georef_info.yllcenter = return_object._georef_info.geo_transform[3] - return_object._georef_info.dy
return_object._georef_info.nx = nx
return_object._georef_info.ny = ny
return return_object
class DEMGrid(BaseSpatialGrid, CalculationMixin):
pass
| """ Classes for loading digital elevation models as numeric grids """
import os, sys
import numpy as np
from osgeo import gdal, gdal_const
class CalculationMixin(object):
def _caclulate_slope(self):
PAD_DX = 2
PAD_DY = 2
z_pad = self._pad_boundary(PAD_DX, PAD_DY)
slope_x = (z_pad[1:-1, 2:] - z_pad[1:-1, :-2])/(2*self.dx)
slope_y = (z_pad[2, 1:-1] - z_pad[:-2, 1:-1])/(2*self.dx)
return slope_x, slope_y
def _calculate_laplacian(self):
return self._calculate_directional_laplacian(0)
def _calculate_directional_laplacian(self, alpha):
dz_dx = np.diff(self._griddata, 1, 2)/self.dx
d2z_dxdy = np.diff(dz_dx, 1, 1)/self.dx
d2z_dx2 = np.diff(self._griddata, 1, 1)/self.dx**2
d2z_dy2 = np.diff(self._griddata, 2, 1)/self.dy**2
del2z = d2z_dx2*np.cos(alpha)**2 - 2*d2z_dxdy*np.sin(alpha)*np.cos(alpha) + d2z_dy2*np.sin(alpha)**2
return del2z
def _pad_boundary(self, dx, dy):
pad_x = np.zeros((self.ny, np.round(dx/2)))
self._griddata = np.hstack([pad_x, self._griddata, pad_x])
self.nx += 2*np.round(dx/2)
self.ny += 2*np.round(dy/2)
pad_y = np.zeros((np.round(dy/2), self.nx))
self._griddata = np.vstack([pad_y, self._griddata, pad_y])
class GDALMixin(object):
pass
class GeorefInfo(object):
pass
#class GeographicMixin(object):
# pass
class BaseGrid(GDALMixin):
pass
class DEMGrid(BaseGrid, CalculationMixin):
pass
| Python | 0 |
bd98a1b1119b34a5435855478d733ca582ebcf0c | Update version to dev | powerpool/__init__.py | powerpool/__init__.py | __version__ = "0.6.3-dev"
__version_info__ = (0, 6, 3)
| __version__ = "0.6.2"
__version_info__ = (0, 6, 2)
| Python | 0 |
49cf5af6c62bb23c8fce660f4b649bb0775ecdbc | 494. Target Sum | problems/test_0494.py | problems/test_0494.py | import unittest
from typing import List
import utils
# O(len(nums) * (sum(nums) + max(nums))) time. O(len(nums) * (sum(nums) + max(nums))) space. DP, 0-1 knapsack.
class Solution:
def findTargetSumWays(self, nums: List[int], S: int) -> int:
if not nums:
return 1 if S == 0 else 0
sum_ = sum(nums)
if not (-sum_ <= S <= sum_):
return 0
max_ = max(nums)
bound = sum_ + max_
range_ = bound * 2 + 1
# dp[i][j]: how many ways to assign symbols to make sum of nums[:i] equal to target j
dp = [[0] * range_ for _ in range(len(nums) + 1)]
dp[0][0] = 1
for i in range(1, len(nums) + 1):
num = nums[i - 1]
for j in range(-sum_, sum_ + 1):
dp[i][j] = dp[i - 1][j - num] + dp[i - 1][j + num]
return dp[len(nums)][S]
class Test(unittest.TestCase):
def test(self):
cases = utils.load_test_json(__file__).test_cases
for case in cases:
args = str(case.args)
actual = Solution().findTargetSumWays(**case.args.__dict__)
self.assertEqual(case.expected, actual, msg=args)
if __name__ == '__main__':
unittest.main()
| import unittest
from typing import List
import utils
# O(len(nums) * sum(nums)) time. O(len(nums) * sum(nums)) space. DP, 0-1 knapsack.
class Solution:
def findTargetSumWays(self, nums: List[int], S: int) -> int:
sum_nums = sum(nums)
if not (-sum_nums <= S <= sum_nums):
return 0
max_num = max(nums)
# dp[i][j]: how many ways to assign symbols to make sum of nums[:i] equal to target j
dp = [[0] * ((sum_nums + max_num) * 2 + 1) for _ in range(len(nums) + 1)]
dp[0][0] = 1
for i in range(1, len(nums) + 1):
num = nums[i - 1]
for j in range(-sum_nums - num, sum_nums + num + 1):
dp[i][j] = dp[i - 1][j - num] + dp[i - 1][j + num]
return dp[len(nums)][S]
class Test(unittest.TestCase):
def test(self):
cases = utils.load_test_json(__file__).test_cases
for case in cases:
args = str(case.args)
actual = Solution().findTargetSumWays(**case.args.__dict__)
self.assertEqual(case.expected, actual, msg=args)
if __name__ == '__main__':
unittest.main()
| Python | 0.999999 |
6da626bf1a999101af188d3d20710a6dddc8dbae | shell=True | ide.py | ide.py | # NOTE: pass -d to this to print debugging info when the server crashes.
from flask import Flask, render_template, url_for, request
from subprocess import Popen, PIPE, check_call
import sys, os, string, glob, logging
app = Flask(__name__)
app.logger.addHandler(logging.StreamHandler(sys.stdout))
app.logger.setLevel(logging.ERROR)
def compileO():
r = check_call(['gcc', 'o.c', '-DIDE', '-o', 'o-ide', '-lm'])
print("o-ide: " + "".join(glob.glob("o-ide*")))
if r != 0:
print("O code could not be compile. Error: " + r)
@app.route('/', methods=['GET', 'POST'])
def index():
url_for('static', filename='logo.ico')
if request.method == 'POST':
#Check files that start with 'o-ide*'
files = glob.glob("o-ide*")
print(files)
#Check if C was compiled
if len(files) < 1:
print("Compiling O...")
compileO()
#Run code
code = request.form['code']
input = request.form['input'].replace('\r\n', '\n')
print('Got code:', code, 'input:', input)
print('Running O code...')
p = Popen(['o-ide', '-e', code], stdout=PIPE, stderr=PIPE, stdin=PIPE, universal_newlines=True, shell=True)
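        # NOTE: with an argument list, shell=True only matters on Windows; on
        # POSIX the shell would receive just the first element ('o-ide')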
output, error = p.communicate(input)
#Output to IDE
print('Output:', output, 'error:', error)
if p.returncode:
return render_template('error.html', code=code, input=input, error=error)
else:
return render_template('code.html', code=code, input=input, output=output, stack=error or '[]')
else:
return render_template('primary.html')
@app.route('/link/')
@app.route('/link/<link>')
def link(link='code="Error in linking code"o&input='):
url_for('static', filename='logo.ico')
print('Link:', link)
return render_template('link.html', link=link)
if __name__ == '__main__':
print('Compiling O...')
compileO()
print('Starting server...')
app.run(debug='-d' in sys.argv[1:]) | # NOTE: pass -d to this to print debugging info when the server crashes.
from flask import Flask, render_template, url_for, request
from subprocess import Popen, PIPE, check_call
import sys, os, string, glob, logging
app = Flask(__name__)
app.logger.addHandler(logging.StreamHandler(sys.stdout))
app.logger.setLevel(logging.ERROR)
def compileO():
r = check_call(['gcc', 'o.c', '-DIDE', '-o', 'o-ide', '-lm'])
print("o-ide: " + "".join(glob.glob("o-ide*")))
if r != 0:
print("O code could not be compile. Error: " + r)
@app.route('/', methods=['GET', 'POST'])
def index():
url_for('static', filename='logo.ico')
if request.method == 'POST':
#Check files that start with 'o-ide*'
files = glob.glob("o-ide*")
print(files)
#Check if C was compiled
if len(files) < 1:
print("Compiling O...")
compileO()
#Run code
code = request.form['code']
input = request.form['input'].replace('\r\n', '\n')
print('Got code:', code, 'input:', input)
print('Running O code...')
p = Popen(['o-ide', '-e', code], stdout=PIPE, stderr=PIPE, stdin=PIPE, universal_newlines=True)
output, error = p.communicate(input)
#Output to IDE
print('Output:', output, 'error:', error)
if p.returncode:
return render_template('error.html', code=code, input=input, error=error)
else:
return render_template('code.html', code=code, input=input, output=output, stack=error or '[]')
else:
return render_template('primary.html')
@app.route('/link/')
@app.route('/link/<link>')
def link(link='code="Error in linking code"o&input='):
url_for('static', filename='logo.ico')
print('Link:', link)
return render_template('link.html', link=link)
if __name__ == '__main__':
print('Compiling O...')
compileO()
print('Starting server...')
app.run(debug='-d' in sys.argv[1:]) | Python | 0.999989 |
ff99addf5ac6589b4ee2c53ef1debf4e9c07b47d | Bump version 0.2 Stable | __tryton__.py | __tryton__.py | #This file is part of Tryton. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
{
'name': 'Nereid Catalog',
'version': '2.0.0.2',
'author': 'Openlabs Technologies & Consulting (P) LTD',
'email': 'info@openlabs.co.in',
'website': 'http://www.openlabs.co.in/',
'description': '''Nereid Catalog''',
'depends': [
'product',
'nereid',
],
'xml': [
'product.xml',
'urls.xml',
],
'translation': [
],
}
| #This file is part of Tryton. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
{
'name': 'Nereid Catalog',
'version': '2.0.0.1',
'author': 'Openlabs Technologies & Consulting (P) LTD',
'email': 'info@openlabs.co.in',
'website': 'http://www.openlabs.co.in/',
'description': '''Nereid Catalog''',
'depends': [
'product',
'nereid',
],
'xml': [
'product.xml',
'urls.xml',
],
'translation': [
],
}
| Python | 0.000001 |
d826e54996bc504d245286b50f7ab5671f1999ae | Update solution.py | data_structures/linked_list/problems/find_pattern_in_linked_list/py/solution.py | data_structures/linked_list/problems/find_pattern_in_linked_list/py/solution.py | import LinkedList
# Problem description: Find a string pattern represented as a linked list in a target linked list.
# Solution time complexity: O(n^2)
# Comments: A brute force solution w/o any optimizations. Simply traverse a list looking for the pattern.
# If the node traversing the "pattern" list ever reaches the end (i.e. pnode == null), it is in
# the list. The case where a pnode may be equal to null due to the pattern being null, is ruled
# out by a test at the beginning of the function.
# Linked List Node inside the LinkedList module is defined as:
#
# class Node:
# def __init__(self, val, nxt=None):
# self.val = val
# self.nxt = nxt
#
def FindPatternInLinkedList(head: LinkedList.Node, pattern: LinkedList.Node) -> int:
if head == None or pattern == None:
return -1
index = 0
tslow = head
pnode = pattern
while tslow != None:
if tslow.val == pattern.val:
tfast = tslow
pnode = pattern
while tfast != None and pnode != None:
if tfast.val == pnode.val:
tfast = tfast.nxt
pnode = pnode.nxt
else:
break
if pnode == None:
return index
tslow = tslow.nxt
index += 1
return -1
| import LinkedList
# Problem description: Find a pattern represented as a linked list in a target linked list.
# Solution time complexity: O(n^2)
# Comments: A brute force solution w/o any optimizations. Simply traverse a list looking for the pattern.
# If the node traversing the "pattern" list ever reaches the end (i.e. pnode == null), it is in
# the list. The case where a pnode may be equal to null due to the pattern being null, is ruled
# out by a test at the beginning of the function.
# Linked List Node inside the LinkedList module is defined as:
#
# class Node:
# def __init__(self, val, nxt=None):
# self.val = val
# self.nxt = nxt
#
def FindPatternInLinkedList(head: LinkedList.Node, pattern: LinkedList.Node) -> int:
if head == None or pattern == None:
return -1
index = 0
tslow = head
pnode = pattern
while tslow != None:
if tslow.val == pattern.val:
tfast = tslow
pnode = pattern
while tfast != None and pnode != None:
if tfast.val == pnode.val:
tfast = tfast.nxt
pnode = pnode.nxt
else:
break
if pnode == None:
return index
tslow = tslow.nxt
index += 1
return -1
| Python | 0.000001 |
7ceaec12381e8bc7f597b1cc32d50655d30d9843 | use inplace installs (#5865) | nox.py | nox.py | # Copyright 2017, Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import nox
LOCAL_DEPS = (
os.path.join('..', 'api_core'),
)
@nox.session
def default(session):
"""Default unit test session.
This is intended to be run **without** an interpreter set, so
that the current ``python`` (on the ``PATH``) or the version of
Python corresponding to the ``nox`` binary on the ``PATH`` can
run the tests.
"""
# Install all test dependencies, then install local packages in-place.
session.install('mock', 'pytest', 'pytest-cov')
for local_dep in LOCAL_DEPS:
session.install('-e', local_dep)
session.install('-e', '.')
# Run py.test against the unit tests.
session.run(
'py.test',
'--quiet',
'--cov=google.cloud.container',
'--cov=google.cloud.container_v1',
'--cov=tests.unit',
'--cov-append',
'--cov-config=.coveragerc',
'--cov-report=',
os.path.join('tests', 'unit'),
*session.posargs
)
@nox.session
@nox.parametrize('py', ['2.7', '3.5', '3.6', '3.7'])
def unit(session, py):
"""Run the unit test suite."""
session.interpreter = 'python{}'.format(py)
session.virtualenv_dirname = 'unit-' + py
default(session)
@nox.session
@nox.parametrize('py', ['2.7', '3.6'])
def system(session, py):
"""Run the system test suite."""
# Sanity check: Only run system tests if the environment variable is set.
if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):
session.skip('Credentials must be set via environment variable.')
# Run the system tests against latest Python 2 and Python 3 only.
session.interpreter = 'python{}'.format(py)
# Set the virtualenv dirname.
session.virtualenv_dirname = 'sys-' + py
# Use pre-release gRPC for system tests.
session.install('--pre', 'grpcio')
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
session.install('mock', 'pytest')
for local_dep in LOCAL_DEPS:
session.install('-e', local_dep)
session.install('-e', '../test_utils/')
session.install('-e', '.')
# Run py.test against the system tests.
session.run('py.test', '--quiet', 'tests/system/')
@nox.session
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
session.interpreter = 'python3.6'
session.install('docutils', 'pygments')
session.run('python', 'setup.py', 'check', '--restructuredtext',
'--strict')
| # Copyright 2017, Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import nox
LOCAL_DEPS = (
os.path.join('..', 'api_core'),
)
@nox.session
def default(session):
"""Default unit test session.
This is intended to be run **without** an interpreter set, so
that the current ``python`` (on the ``PATH``) or the version of
Python corresponding to the ``nox`` binary on the ``PATH`` can
run the tests.
"""
# Install all test dependencies, then install this package in-place.
session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS)
session.install('-e', '.')
# Run py.test against the unit tests.
session.run(
'py.test',
'--quiet',
'--cov=google.cloud.container',
'--cov=google.cloud.container_v1',
'--cov=tests.unit',
'--cov-append',
'--cov-config=.coveragerc',
'--cov-report=',
os.path.join('tests', 'unit'),
*session.posargs
)
@nox.session
@nox.parametrize('py', ['2.7', '3.5', '3.6', '3.7'])
def unit(session, py):
"""Run the unit test suite."""
session.interpreter = 'python{}'.format(py)
session.virtualenv_dirname = 'unit-' + py
default(session)
@nox.session
@nox.parametrize('py', ['2.7', '3.6'])
def system(session, py):
"""Run the system test suite."""
# Sanity check: Only run system tests if the environment variable is set.
if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):
session.skip('Credentials must be set via environment variable.')
# Run the system tests against latest Python 2 and Python 3 only.
session.interpreter = 'python{}'.format(py)
# Set the virtualenv dirname.
session.virtualenv_dirname = 'sys-' + py
# Use pre-release gRPC for system tests.
session.install('--pre', 'grpcio')
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
session.install('mock', 'pytest')
session.install('../test_utils/')
session.install('.')
# Run py.test against the system tests.
session.run('py.test', '--quiet', 'tests/system/')
@nox.session
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
session.interpreter = 'python3.6'
session.install('docutils', 'pygments')
session.run('python', 'setup.py', 'check', '--restructuredtext',
'--strict')
| Python | 0 |
7c527f486e2e129861915f73e0625ec00388e15e | Fix failing MPI tests | test/hoomd_script/test_multiple_contexts.py | test/hoomd_script/test_multiple_contexts.py | # -*- coding: iso-8859-1 -*-
from hoomd_script import *
import hoomd_script;
context.initialize()
import unittest
import os
# unit test to run a simple polymer system with pair and bond potentials
class multi_context(unittest.TestCase):
def test_run(self):
self.c1 = context.SimulationContext()
self.c2 = context.SimulationContext()
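        # two independent simulation contexts; the "with" blocks below select
        # which context init, pair, integrate and run operate on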
with self.c1:
init.create_random(N=2000, phi_p=0.2)
lj = pair.lj(r_cut=3.0)
lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0)
integrate.mode_standard(dt=0.005)
integrate.nvt(group=group.all(), T=1.2, tau=0.5)
with self.c2:
init.create_random(N=1000, phi_p=0.02)
lj = pair.lj(r_cut=3.0)
lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0)
integrate.mode_standard(dt=0.005)
integrate.nvt(group=group.all(), T=1.2, tau=0.5)
with self.c1:
run(10)
with self.c2:
run(10)
if __name__ == '__main__':
unittest.main(argv = ['test.py', '-v'])
| # -*- coding: iso-8859-1 -*-
from hoomd_script import *
import hoomd_script;
context.initialize()
import unittest
import os
# unit test to run a simple polymer system with pair and bond potentials
class multi_context(unittest.TestCase):
def test_run(self):
self.c1 = context.SimulationContext()
self.c2 = context.SimulationContext()
with self.c1:
init.create_random(N=500, phi_p=0.2)
lj = pair.lj(r_cut=3.0)
lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0)
integrate.mode_standard(dt=0.005)
integrate.nvt(group=group.all(), T=1.2, tau=0.5)
with self.c2:
init.create_random(N=200, phi_p=0.02)
lj = pair.lj(r_cut=3.0)
lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0)
integrate.mode_standard(dt=0.005)
integrate.nvt(group=group.all(), T=1.2, tau=0.5)
with self.c1:
run(10)
with self.c2:
run(10)
if __name__ == '__main__':
unittest.main(argv = ['test.py', '-v'])
| Python | 0.000013 |
988ed4be9152632b2844e962e225adac63d869db | Fix crash on windows | spd.py | spd.py | #!/usr/bin/env python3
import os
import re
import sys
from platform import system as operatingSystem
from subprocess import call
from urllib.request import Request, urlopen
def getWebPage(url):
print("\ngetting: "+url)
h = Request(url)
h.add_header('User-Agent', 'SPD/1.0')
webpage = urlopen(h).read()
return str(webpage) # convert from bytes to string
def getSubmittedPage(userName):
return getWebPage('https://www.reddit.com/user/' +
userName +
'/submitted/')
def downloadImage(link):
print('downloading: ' + link)
# open wget in the background
if operatingSystem() == 'Windows':
# NUL ~ /dev/null
call(['wget', '-b', '-N', '-o', 'NUL', link])
else:
call(['wget', '-b', '-N', '-o', '/dev/null', link])
def downloadImageGallery(link):
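    # a gfycat page maps to a single giant.gfycat GIF; an imgur page may embed
    # several images, so scrape and download each one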
webpage = getWebPage(link)
link = link.replace('http:', 'https:')
if re.search(r"gfycat\.com/", link):
link = link.replace('gfycat', 'giant.gfycat') + '.gif'
downloadImage(link)
elif re.search(r"imgur\.com/", link):
for image in getAllImageURLs(webpage):
downloadImage(image)
pass
def isGallery(link):
if re.match(r"https://(?:imgur\.com/|gfycat\.com/)", link):
return True
return False
def getAllImageURLs(webpage):
urlList = re.findall(
r'src="//(i\.imgur\.com/[a-zA-Z0-9]{7}\.(?:[a-z]{3,4})(?:\?[0-9]+?)?)"',
webpage)
return urlList
def getAllImages(webpage):
for link in re.findall(
"<a class=\"title may-blank ?\" href=\"(https?://" +
"(?:gfycat\\.com/[a-zA-Z]+|" +
"imgur\\.com/(?:[a-zA-Z0-9]{7}|a/[a-zA-Z0-9]{5})|" +
"i\\.imgur\\.com/[a-zA-Z0-9]{7}\\.(?:[a-z]{3,4})(?:\?[0-9]+?)?))",
webpage):
link = link.replace('http:', 'https:')
if isGallery(link):
downloadImageGallery(link)
else:
downloadImage(link)
def pageGetNextPage(webpage, userName):
nextPage = re.findall(
"(https?://www\\.reddit\\.com/user/" +
userName +
"/submitted/\\?count=[0-9]{2,4}&after=t[0-9]_[a-z0-9]{6})",
webpage)
if not nextPage == []:
return getWebPage(nextPage[0].replace('amp;', ''))
else:
return None
userName = sys.argv[1]
if len(sys.argv) > 2:
basePath = sys.argv[2]
else:
basePath = os.path.expanduser("~/Pictures/SPD/")
if not os.path.exists(basePath + userName):
os.makedirs(basePath + userName)
os.chdir(basePath + userName)
userSubmitted = getSubmittedPage(userName)
getAllImages(userSubmitted)
while True:
userSubmitted = pageGetNextPage(userSubmitted, userName)
if userSubmitted is None:
break
getAllImages(userSubmitted)
| #!/usr/bin/env python3
import os
import re
import sys
from subprocess import call
from urllib.request import Request, urlopen
def getWebPage(url):
print("\ngetting: "+url)
h = Request(url)
h.add_header('User-Agent', 'SPD/1.0')
webpage = urlopen(h).read()
return str(webpage) # convert from bytes to string
def getSubmittedPage(userName):
return getWebPage('https://www.reddit.com/user/' +
userName +
'/submitted/')
def downloadImage(link):
print('downloading: ' + link)
# open wget in the background
call(['wget', '-b', '-N', '-o', '/dev/null', link])
def downloadImageGallery(link):
webpage = getWebPage(link)
link = link.replace('http:', 'https:')
if re.search(r"gfycat\.com/", link):
link = link.replace('gfycat', 'giant.gfycat') + '.gif'
downloadImage(link)
elif re.search(r"imgur\.com/", link):
for image in getAllImageURLs(webpage):
downloadImage(image)
pass
def isGallery(link):
if re.match(r"https://(?:imgur\.com/|gfycat\.com/)", link):
return True
return False
def getAllImageURLs(webpage):
urlList = re.findall(
r'src="//(i\.imgur\.com/[a-zA-Z0-9]{7}\.(?:[a-z]{3,4})(?:\?[0-9]+?)?)"',
webpage)
return urlList
def getAllImages(webpage):
for link in re.findall(
"<a class=\"title may-blank ?\" href=\"(https?://" +
"(?:gfycat\\.com/[a-zA-Z]+|" +
"imgur\\.com/(?:[a-zA-Z0-9]{7}|a/[a-zA-Z0-9]{5})|" +
"i\\.imgur\\.com/[a-zA-Z0-9]{7}\\.(?:[a-z]{3,4})(?:\?[0-9]+?)?))",
webpage):
link = link.replace('http:', 'https:')
if isGallery(link):
downloadImageGallery(link)
else:
downloadImage(link)
def pageGetNextPage(webpage, userName):
nextPage = re.findall(
"(https?://www\\.reddit\\.com/user/" +
userName +
"/submitted/\\?count=[0-9]{2,4}&after=t[0-9]_[a-z0-9]{6})",
webpage)
if not nextPage == []:
return getWebPage(nextPage[0].replace('amp;', ''))
else:
return None
userName = sys.argv[1]
if not os.path.exists("~/Pictures/SPD/" + userName):
os.makedirs("~/Pictures/SPD/" + userName)
os.chdir("~/Pictures/SPD/" + userName)
userSubmitted = getSubmittedPage(userName)
getAllImages(userSubmitted)
while True:
userSubmitted = pageGetNextPage(userSubmitted, userName)
if userSubmitted is None:
break
getAllImages(userSubmitted)
| Python | 0 |
1e8094a187284961a380bea94bbc806aa4430a3d | fix to t2m.py | t2m.py | t2m.py | #! /usr/bin/env python
# Requires python 2.7
import sys
import socket
import datetime
import json
import logging
import os
if os.path.exists('/vagrant'):
logfilename = '/vagrant/.t2m.log'
else:
logfilename = '/tmp/.t2m.log'
logging.basicConfig(
filename=logfilename,
level=logging.DEBUG)
BUFSIZE = 1024
DEBUG = True
def main():
if len(sys.argv) < 2:
usage()
try:
server()
except Exception, e:
logging.critical("server() exited with exception %s" % str(e))
def usage():
sys.stdout = sys.stderr
print('Usage: %s listen_ip udp_port' % sys.argv[0])
sys.exit(2)
def debug(msg):
if DEBUG: logging.debug(msg)
def s_to_datetime(dts):
try:
dtobj = datetime.datetime.strptime(dts, "%d/%m/%Y %H:%M")
return dtobj
except ValueError:
return False
def parse_data(data):
"""
Quick n dirty data parsing.
string -> datetime, string
"""
dtobj = None
msg = None
if ']' not in data:
return dtobj, msg
try:
dts, msg = data.split(']')
dts = dts.strip('[')
if msg[0] != ' ':
return dtobj, msg
msg = msg[1:]
dtobj = s_to_datetime(dts)
except ValueError:
pass
return dtobj, msg
def server():
if len(sys.argv) > 2:
host = sys.argv[1]
port = eval(sys.argv[2])
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind((host, port))
debug('udp server ready on %s:%s' % (host,port))
while 1:
data, addr = s.recvfrom(BUFSIZE)
debug('server received %r from %r' % (data, addr))
dt, msg = parse_data(data)
if dt and msg:
#send data back
datadict = {
"timestamp": (dt - datetime.datetime(1970,1,1)).total_seconds(),
"message": msg
}
jsondata = json.dumps(datadict)
debug(jsondata)
print(jsondata)
s.sendto(jsondata, addr)
else:
debug('invalid message received.')
if __name__ == '__main__': main()
| #! /usr/bin/env python
# Requires python 2.7
import sys
import socket
import datetime
import json
import logging
if os.path.exists('/vagrant'):
logfilename = '/vagrant/.t2m.log'
else:
logfilename = '/tmp/.t2m.log'
logging.basicConfig(
filename=logfilename,
level=logging.DEBUG)
BUFSIZE = 1024
DEBUG = True
def main():
if len(sys.argv) < 2:
usage()
try:
server()
except Exception, e:
logging.critical("server() exited with exception %s" % str(e))
def usage():
sys.stdout = sys.stderr
print('Usage: %s listen_ip udp_port' % sys.argv[0])
sys.exit(2)
def debug(msg):
if DEBUG: logging.debug(msg)
def s_to_datetime(dts):
try:
dtobj = datetime.datetime.strptime(dts, "%d/%m/%Y %H:%M")
return dtobj
except ValueError:
return False
def parse_data(data):
"""
Quick n dirty data parsing.
string -> datetime, string
"""
dtobj = None
msg = None
if ']' not in data:
return dtobj, msg
try:
dts, msg = data.split(']')
dts = dts.strip('[')
if msg[0] != ' ':
return dtobj, msg
msg = msg[1:]
dtobj = s_to_datetime(dts)
except ValueError:
pass
return dtobj, msg
def server():
if len(sys.argv) > 2:
host = sys.argv[1]
port = eval(sys.argv[2])
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind((host, port))
debug('udp server ready on %s:%s' % (host,port))
while 1:
data, addr = s.recvfrom(BUFSIZE)
debug('server received %r from %r' % (data, addr))
dt, msg = parse_data(data)
if dt and msg:
#send data back
datadict = {
"timestamp": (dt - datetime.datetime(1970,1,1)).total_seconds(),
"message": msg
}
jsondata = json.dumps(datadict)
debug(jsondata)
print(jsondata)
s.sendto(jsondata, addr)
else:
debug('invalid message received.')
if __name__ == '__main__': main()
| Python | 0.999996 |
96f93e39ad12d893d8672dd2bb2abea4e6020799 | update the file name in the send file to uri | bin/utils/sftp_transactions.py | bin/utils/sftp_transactions.py | import pysftp as sftp
import sys
import os
from email_transactions import email_transactions
import paramiko
# This addresses the issues with relative paths
file_dir = os.path.dirname(os.path.realpath(__file__))
goal_dir = os.path.join(file_dir, "../")
proj_root = os.path.abspath(goal_dir)+'/'
sys.path.insert(0, proj_root+'bin')
class sftp_transactions:
"""A class for handling the sftp transactions. This class contains
functions for getting a file from sftp server and putting a file
to a sftp server"""
def __init__(self):
self.data = []
def send_file_to_uri(self, site_URI, uname, password, remotepath, file_name, localpath, contact_email):
'''This function puts the specified file to the given uri.
Authentication is done using the uname and password
remotepath - the path where the file needs to be put
localpath - the path where the file is picked from
        contact_email - the email of the concerned authority to mail to in case of a failed
transaction
'''
# make a connection with uri and credentials
bridge = paramiko.Transport((site_URI, 22))
bridge.connect(username = uname, password = password)
connect = paramiko.SFTPClient.from_transport(bridge)
        # import here to eliminate a circular dependency
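        # make sure the remote directory exists, creating it if necessary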
try:
connect.chdir(remotepath)
except IOError:
connect.mkdir(remotepath)
connect.chdir(remotepath)
try:
# put the file at the designated location in the server
connect.put(localpath, remotepath+file_name)
# close the connection
connect.close()
except Exception, e:
# closing the connection incase there is any exception
connect.close()
'''Report should be sent to the concerned authority with the error
message
'''
print 'Error sending file to '+site_URI
print 'Check the credentials/remotepath/localpath/Server URI'
email_transactions().send_mail('please-do-not-reply@ufl.edu', contact_email, str(e))
print str(e)
pass
def get_file_from_uri(self, site_URI, uname, password, remotepath, localpath, contact_email):
        '''This function gets the specified file from the given uri.
Authentication is done using the uname and password
remotepath - the path where the file needs to be put
localpath - the path where the file is picked from
        contact_email - the email of the concerned authority to mail to in case of a failed
transaction
'''
# make a connection with uri and credentials
connect = sftp.Connection(host=site_URI, username=uname, password=password)
try:
# get the file from the designated location in the server
connect.get(remotepath, localpath)
# close the connection
connect.close()
except Exception, e:
# closing the connection incase there is any exception
connect.close()
'''Report should be sent to the concerned authority with the error
message
'''
email_transactions().send_mail('please-do-not-reply@ufl.edu', contact_email, str(e))
print str(e)
pass
| import pysftp as sftp
import sys
import os
from email_transactions import email_transactions
import paramiko
# This addresses the issues with relative paths
file_dir = os.path.dirname(os.path.realpath(__file__))
goal_dir = os.path.join(file_dir, "../")
proj_root = os.path.abspath(goal_dir)+'/'
sys.path.insert(0, proj_root+'bin')
class sftp_transactions:
"""A class for handling the sftp transactions. This class contains
functions for getting a file from sftp server and putting a file
to a sftp server"""
def __init__(self):
self.data = []
def send_file_to_uri(self, site_URI, uname, password, remotepath, localpath, contact_email):
'''This function puts the specified file to the given uri.
Authentication is done using the uname and password
remotepath - the path where the file needs to be put
localpath - the path where the file is picked from
contact_email - the email of the concerned authority to mail to incase of failed
transaction
'''
# make a connection with uri and credentials
bridge = paramiko.Transport((site_URI, 22))
bridge.connect(username = uname, password = password)
connect = paramiko.SFTPClient.from_transport(bridge)
# import here to eliminate circular dependancy
try:
connect.chdir(remotepath)
except IOError:
connect.mkdir(remotepath)
connect.chdir(remotepath)
try:
# put the file at the designated location in the server
connect.put(localpath, remotepath+'smi.xml')
# close the connection
connect.close()
except Exception, e:
# closing the connection incase there is any exception
connect.close()
'''Report should be sent to the concerned authority with the error
message
'''
print 'Error sending file to '+site_URI
print 'Check the credentials/remotepath/localpath/Server URI'
email_transactions().send_mail('please-do-not-reply@ufl.edu', contact_email, str(e))
print str(e)
pass
def get_file_from_uri(self, site_URI, uname, password, remotepath, localpath, contact_email):
'''This function gets the specified file to the given uri.
Authentication is done using the uname and password
remotepath - the path where the file needs to be put
localpath - the path where the file is picked from
contact_email - the email of the concerned authority to mail to incase of failed
transaction
'''
# make a connection with uri and credentials
connect = sftp.Connection(host=site_URI, username=uname, password=password)
try:
# get the file from the designated location in the server
connect.get(remotepath, localpath)
# close the connection
connect.close()
except Exception, e:
# closing the connection incase there is any exception
connect.close()
'''Report should be sent to the concerned authority with the error
message
'''
email_transactions().send_mail('please-do-not-reply@ufl.edu', contact_email, str(e))
print str(e)
pass | Python | 0.000001 |
0d4c041d239e7d7ed234f359ae483523b05e367b | correct the 'View Credentials' icon | openstack_dashboard/dashboards/project/access_and_security/api_access/tables.py | openstack_dashboard/dashboards/project/access_and_security/api_access/tables.py | # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.template.defaultfilters import title # noqa
from django.utils.translation import ugettext_lazy as _
from openstack_auth import utils
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.access_and_security.api_access \
import forms as project_forms
from openstack_dashboard import policy
def pretty_service_names(name):
name = name.replace('-', ' ')
if name in ['ec2', 's3']:
name = name.upper()
else:
name = title(name)
return name
class DownloadEC2(tables.LinkAction):
name = "download_ec2"
verbose_name = _("Download EC2 Credentials")
verbose_name_plural = _("Download EC2 Credentials")
icon = "download"
url = "horizon:project:access_and_security:api_access:ec2"
policy_rules = (("compute", "compute_extension:certificates"),)
def allowed(self, request, datum=None):
return api.base.is_service_enabled(request, 'ec2')
class DownloadOpenRC(tables.LinkAction):
name = "download_openrc"
verbose_name = _("Download OpenStack RC File v3")
verbose_name_plural = _("Download OpenStack RC File v3")
icon = "download"
url = "horizon:project:access_and_security:api_access:openrc"
def allowed(self, request, datum=None):
return utils.get_keystone_version() >= 3
class DownloadOpenRCv2(tables.LinkAction):
name = "download_openrc_v2"
verbose_name = _("Download OpenStack RC File v2.0")
verbose_name_plural = _("Download OpenStack RC File v2.0")
icon = "download"
url = "horizon:project:access_and_security:api_access:openrcv2"
class ViewCredentials(tables.LinkAction):
name = "view_credentials"
verbose_name = _("View Credentials")
classes = ("ajax-modal", )
icon = "eye"
url = "horizon:project:access_and_security:api_access:view_credentials"
class RecreateCredentials(tables.LinkAction):
name = "recreate_credentials"
verbose_name = _("Recreate EC2 Credentials")
classes = ("ajax-modal",)
icon = "refresh"
url = \
"horizon:project:access_and_security:api_access:recreate_credentials"
policy_rules = (("compute", "compute_extension:certificates"))
action_type = "danger"
def allowed(self, request, datum=None):
try:
target = {"target.credential.user_id": request.user.id}
if (api.base.is_service_enabled(request, 'ec2') and
project_forms.get_ec2_credentials(request) and
policy.check((("identity", "identity:ec2_create_credential"),
("identity", "identity:ec2_delete_credential")),
request, target=target)):
return True
except Exception:
pass
return False
class EndpointsTable(tables.DataTable):
api_name = tables.Column('type',
verbose_name=_("Service"),
filters=(pretty_service_names,))
api_endpoint = tables.Column('public_url',
verbose_name=_("Service Endpoint"))
class Meta(object):
name = "endpoints"
verbose_name = _("API Endpoints")
multi_select = False
table_actions = (DownloadOpenRCv2, DownloadOpenRC, DownloadEC2,
ViewCredentials, RecreateCredentials)
| # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.template.defaultfilters import title # noqa
from django.utils.translation import ugettext_lazy as _
from openstack_auth import utils
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.access_and_security.api_access \
import forms as project_forms
from openstack_dashboard import policy
def pretty_service_names(name):
name = name.replace('-', ' ')
if name in ['ec2', 's3']:
name = name.upper()
else:
name = title(name)
return name
class DownloadEC2(tables.LinkAction):
name = "download_ec2"
verbose_name = _("Download EC2 Credentials")
verbose_name_plural = _("Download EC2 Credentials")
icon = "download"
url = "horizon:project:access_and_security:api_access:ec2"
policy_rules = (("compute", "compute_extension:certificates"),)
def allowed(self, request, datum=None):
return api.base.is_service_enabled(request, 'ec2')
class DownloadOpenRC(tables.LinkAction):
name = "download_openrc"
verbose_name = _("Download OpenStack RC File v3")
verbose_name_plural = _("Download OpenStack RC File v3")
icon = "download"
url = "horizon:project:access_and_security:api_access:openrc"
def allowed(self, request, datum=None):
return utils.get_keystone_version() >= 3
class DownloadOpenRCv2(tables.LinkAction):
name = "download_openrc_v2"
verbose_name = _("Download OpenStack RC File v2.0")
verbose_name_plural = _("Download OpenStack RC File v2.0")
icon = "download"
url = "horizon:project:access_and_security:api_access:openrcv2"
class ViewCredentials(tables.LinkAction):
name = "view_credentials"
verbose_name = _("View Credentials")
classes = ("ajax-modal", )
icon = "plus"
url = "horizon:project:access_and_security:api_access:view_credentials"
class RecreateCredentials(tables.LinkAction):
name = "recreate_credentials"
verbose_name = _("Recreate EC2 Credentials")
classes = ("ajax-modal",)
icon = "refresh"
url = \
"horizon:project:access_and_security:api_access:recreate_credentials"
policy_rules = (("compute", "compute_extension:certificates"))
action_type = "danger"
def allowed(self, request, datum=None):
try:
target = {"target.credential.user_id": request.user.id}
if (api.base.is_service_enabled(request, 'ec2') and
project_forms.get_ec2_credentials(request) and
policy.check((("identity", "identity:ec2_create_credential"),
("identity", "identity:ec2_delete_credential")),
request, target=target)):
return True
except Exception:
pass
return False
class EndpointsTable(tables.DataTable):
api_name = tables.Column('type',
verbose_name=_("Service"),
filters=(pretty_service_names,))
api_endpoint = tables.Column('public_url',
verbose_name=_("Service Endpoint"))
class Meta(object):
name = "endpoints"
verbose_name = _("API Endpoints")
multi_select = False
table_actions = (DownloadOpenRCv2, DownloadOpenRC, DownloadEC2,
ViewCredentials, RecreateCredentials)
| Python | 0.000003 |
402e9515419f0db7f449eac9f810389a4608b878 | Comment out venue for now, since we don't have one yet | settings.py | settings.py | # -*- encoding: utf-8 -*-
import os
from wafer.settings import *
try:
from localsettings import *
except ImportError:
pass
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse_lazy
pyconzadir = os.path.dirname(__file__)
STATICFILES_DIRS = (
os.path.join(pyconzadir, 'static'),
os.path.join(pyconzadir, 'bower_components'),
)
TEMPLATE_DIRS = (
os.path.join(pyconzadir, 'templates'),
) + TEMPLATE_DIRS
WAFER_MENUS += (
{"menu": "about", "label": _("About"),
"items": []},
#{"name": "venue", "label": _("Venue"),
# "url": reverse_lazy("wafer_page", args=("venue",))},
{"menu": "sponsors", "label": _("Sponsors"),
"items": [
{"name": "sponsors", "label": _("Our sponsors"),
"url": reverse_lazy("wafer_sponsors")},
{"name": "packages", "label": _("Sponsorship packages"),
"url": reverse_lazy("wafer_sponsorship_packages")},
]},
{"menu": "talks", "label": _("Talks"),
"items": [
{"name": "schedule", "label": _("Schedule"),
"url": reverse_lazy("wafer_full_schedule")},
{"name": "schedule-next-up", "label": _("Next up"),
# Once conference has started use:
# "url": reverse_lazy("wafer_current")},
"url": "/schedule/current/?day=2015-10-01&time=08:30"},
# {"name": "accepted-talks", "label": _("Accepted Talks"),
# "url": reverse_lazy("wafer_users_talks")},
]},
{"menu": "events", "label": _("News"),
"items": []},
{"menu": "previous-pycons", "label": _("Past PyConZAs"),
"items": [
{"name": "pyconza2012", "label": _("PyConZA 2012"),
"url": "http://2012.za.pycon.org/"},
{"name": "pyconza2013", "label": _("PyConZA 2013"),
"url": "http://2013.za.pycon.org/"},
{"name": "pyconza2014", "label": _("PyConZA 2014"),
"url": "http://2014.za.pycon.org/"},
]},
{"name": "twitter", "label": "Twitter",
"image": "/static/img/twitter.png",
"url": "https://twitter.com/pyconza"},
{"name": "googleplus", "label": "Google+",
"image": "/static/img/googleplus.png",
"url": "https://plus.google.com/u/0/events/cpnt1h6tf6m59k8i4uvhmrvguis"},
{"name": "facebook", "label": "Facebook",
"image": "/static/img/facebook.png",
"url": "https://www.facebook.com/events/1467196980209896/"},
)
CRISPY_TEMPLATE_PACK = 'bootstrap3'
MARKITUP_FILTER = ('markdown.markdown', {
'safe_mode': False,
'extensions': ['outline'],
})
# Use HTTPS jquery URL so it's accessible on HTTPS pages (e.g. editing a talk)
JQUERY_URL = 'https://ajax.googleapis.com/ajax/libs/jquery/2.0.3/jquery.min.js'
| # -*- encoding: utf-8 -*-
import os
from wafer.settings import *
try:
from localsettings import *
except ImportError:
pass
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse_lazy
pyconzadir = os.path.dirname(__file__)
STATICFILES_DIRS = (
os.path.join(pyconzadir, 'static'),
os.path.join(pyconzadir, 'bower_components'),
)
TEMPLATE_DIRS = (
os.path.join(pyconzadir, 'templates'),
) + TEMPLATE_DIRS
WAFER_MENUS += (
{"menu": "about", "label": _("About"),
"items": []},
{"name": "venue", "label": _("Venue"),
"url": reverse_lazy("wafer_page", args=("venue",))},
{"menu": "sponsors", "label": _("Sponsors"),
"items": [
{"name": "sponsors", "label": _("Our sponsors"),
"url": reverse_lazy("wafer_sponsors")},
{"name": "packages", "label": _("Sponsorship packages"),
"url": reverse_lazy("wafer_sponsorship_packages")},
]},
{"menu": "talks", "label": _("Talks"),
"items": [
{"name": "schedule", "label": _("Schedule"),
"url": reverse_lazy("wafer_full_schedule")},
{"name": "schedule-next-up", "label": _("Next up"),
# Once conference has started use:
# "url": reverse_lazy("wafer_current")},
"url": "/schedule/current/?day=2015-10-01&time=08:30"},
# {"name": "accepted-talks", "label": _("Accepted Talks"),
# "url": reverse_lazy("wafer_users_talks")},
]},
{"menu": "events", "label": _("News"),
"items": []},
{"menu": "previous-pycons", "label": _("Past PyConZAs"),
"items": [
{"name": "pyconza2012", "label": _("PyConZA 2012"),
"url": "http://2012.za.pycon.org/"},
{"name": "pyconza2013", "label": _("PyConZA 2013"),
"url": "http://2013.za.pycon.org/"},
{"name": "pyconza2014", "label": _("PyConZA 2014"),
"url": "http://2014.za.pycon.org/"},
]},
{"name": "twitter", "label": "Twitter",
"image": "/static/img/twitter.png",
"url": "https://twitter.com/pyconza"},
{"name": "googleplus", "label": "Google+",
"image": "/static/img/googleplus.png",
"url": "https://plus.google.com/u/0/events/cpnt1h6tf6m59k8i4uvhmrvguis"},
{"name": "facebook", "label": "Facebook",
"image": "/static/img/facebook.png",
"url": "https://www.facebook.com/events/1467196980209896/"},
)
CRISPY_TEMPLATE_PACK = 'bootstrap3'
MARKITUP_FILTER = ('markdown.markdown', {
'safe_mode': False,
'extensions': ['outline'],
})
# Use HTTPS jquery URL so it's accessible on HTTPS pages (e.g. editing a talk)
JQUERY_URL = 'https://ajax.googleapis.com/ajax/libs/jquery/2.0.3/jquery.min.js'
| Python | 0 |
39586322784382e9dfd4a961bda4253bb27bca5f | Add support for the django debug toolbar | settings.py | settings.py | # Django settings for authentic project.
import os
DEBUG = True
USE_DEBUG_TOOLBAR = True
TEMPLATE_DEBUG = DEBUG
PROJECT_PATH = os.path.dirname(os.path.abspath(__file__))
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'authentic.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Paris'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '0!=(1kc6kri-ui+tmj@mr+*0bvj!(p*r0duu2n=)7@!p=pvf9n'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.csrf.middleware.CsrfMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'authentic.urls'
TEMPLATE_DIRS = (
os.path.join(PROJECT_PATH, 'templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.admin',
'authentic.idp'
)
INTERNAL_IPS = ('127.0.0.1',)
if USE_DEBUG_TOOLBAR:
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar',)
| # Django settings for authentic project.
import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
PROJECT_PATH = os.path.dirname(os.path.abspath(__file__))
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'authentic.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Paris'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '0!=(1kc6kri-ui+tmj@mr+*0bvj!(p*r0duu2n=)7@!p=pvf9n'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.csrf.middleware.CsrfMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'authentic.urls'
TEMPLATE_DIRS = (
os.path.join(PROJECT_PATH, 'templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.admin',
'authentic.idp'
)
| Python | 0 |
145c31a283bb458bdce72169ea16f07040236ee5 | add comment about settings.py | settings.py | settings.py | """
To run `django-admin.py syncdb --settings settings --noinput` before testing.
"""
SECRET_KEY = 'x'
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'travis_ci_test',
}
}
INSTALLED_APPS=(
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.gis',
'boundaries',
)
| SECRET_KEY = 'x'
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'travis_ci_test',
}
}
INSTALLED_APPS=(
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.gis',
'boundaries',
)
| Python | 0 |
417ab5241c852cdcd072143bc2444f20f2117623 | Update capture profiler to new spec of providing board instead of string. | tests/execution_profiles/capture_profile.py | tests/execution_profiles/capture_profile.py | import cProfile
from pqhelper import capture
def main():
cProfile.run('test_solution(catapult)')
def test_solution(board_string):
board = capture.Board(board_string)
print capture.capture(board)
skeleton = '''
..*..*..
.gm..mg.
.ms..sm.
.rs..sr.
.ggmmgg.
.rsggsr.
.rsrrsr.
ssgssgss'''
giant_rat = '''
...mm...
..mrym..
.mgyrgm.
mygrygym
ryxssxyr
rxgbbgxr
xygssgyx
rybssbyr'''
griffon = '''
.r..s...
.b.sy...
.b.yys..
.r.yxg.g
.g.x*b.r
.g.xxb.r
rybyxygy
ygyxybyr'''
catapult = '''
........
........
..mbgm..
.mxgbrm.
my*gb*gm
yrrxrxxg
ymxyyrmg
ssxssrss'''
easy = '''
........
........
........
........
.......x
....xx.r
....rr.r
..rryyry'''
if __name__ == '__main__':
main() | import cProfile
from pqhelper import capture
def main():
cProfile.run('test_solution(catapult)')
def test_solution(board_string):
print capture.capture(board_string)
skeleton = '''
..*..*..
.gm..mg.
.ms..sm.
.rs..sr.
.ggmmgg.
.rsggsr.
.rsrrsr.
ssgssgss'''
giant_rat = '''
...mm...
..mrym..
.mgyrgm.
mygrygym
ryxssxyr
rxgbbgxr
xygssgyx
rybssbyr'''
griffon = '''
.r..s...
.b.sy...
.b.yys..
.r.yxg.g
.g.x*b.r
.g.xxb.r
rybyxygy
ygyxybyr'''
catapult = '''
........
........
..mbgm..
.mxgbrm.
my*gb*gm
yrrxrxxg
ymxyyrmg
ssxssrss'''
easy = '''
........
........
........
........
.......x
....xx.r
....rr.r
..rryyry'''
if __name__ == '__main__':
main() | Python | 0 |
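cProfile.run() executes its statement string in the __main__ namespace, which is why the profiled call above can refer to the module-level catapult board directly. When the objects involved are not module globals, cProfile.runctx() accepts explicit namespaces; a minimal sketch with a stand-in solver:

import cProfile

def solve(board_string):
    # stand-in for the real solver; any pure function works here
    return sum(ord(c) for c in board_string)

board = '..rryyry'
cProfile.runctx('solve(b)', globals(), {'b': board})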
5829afb4345d09a04fca61e51624f580a95c408d | remove None.appspot.com from settings.PLAYGROUND_HOSTS | settings.py | settings.py | """Module containing global playground constants and functions."""
import os
from google.appengine.api import app_identity
from google.appengine.api import backends
import appids
import secret
DEBUG = True
# user content hostname prefix
USER_CONTENT_PREFIX = 'user-content'
# RFC1113 formatted 'Expires' to prevent HTTP/1.0 caching
LONG_AGO = 'Mon, 01 Jan 1990 00:00:00 GMT'
# 10 minutes
TEMPLATE_MEMCACHE_TIME = 3600
# owners of template projects
PUBLIC_PROJECT_TEMPLATE_OWNER = 'PUBLIC_TEMPLATE'
MANUAL_PROJECT_TEMPLATE_OWNER = 'MANUAL_TEMPLATE'
PROJECT_TEMPLATE_OWNERS = [
PUBLIC_PROJECT_TEMPLATE_OWNER,
MANUAL_PROJECT_TEMPLATE_OWNER
]
# whether or not we're running in the dev_appserver
_DEV_MODE = os.environ['SERVER_SOFTWARE'].startswith('Development/')
# namespace for playground specific data
PLAYGROUND_NAMESPACE = '_playground'
# template projects location
TEMPLATE_PROJECT_DIR = 'repos/'
# project access_key query parameter name
ACCESS_KEY_SET_COOKIE_PARAM_NAME = 'set_access_key_cookie'
ACCESS_KEY_HTTP_HEADER = 'X-Cloud-Playground-Access-Key'
ACCESS_KEY_COOKIE_NAME = 'access_key'
ACCESS_KEY_COOKIE_ARGS = {
'httponly': True,
'secure': not _DEV_MODE,
}
# name for the session cookie
SESSION_COOKIE_NAME = 'session'
SESSION_COOKIE_ARGS = {
'httponly': True,
'secure': not _DEV_MODE,
}
XSRF_COOKIE_ARGS = {
'httponly': False,
'secure': not _DEV_MODE,
}
WSGI_CONFIG = {
'webapp2_extras.sessions': {
'secret_key': secret.GetSecret('webapp2_extras.sessions', entropy=128),
'cookie_args': SESSION_COOKIE_ARGS,
}
}
# One hour
MIN_EXPIRATION_SECONDS = 3600
# One week
DEFAULT_EXPIRATION_SECONDS = 604800
# Extensions to exclude when creating template projects
SKIP_EXTENSIONS = ('swp', 'pyc', 'svn')
if _DEV_MODE:
PLAYGROUND_HOSTS = ['localhost:8080', '127.0.0.1:8080',
# port 7070 for karma e2e test
'localhost:7070', '127.0.0.1:7070',
app_identity.get_default_version_hostname()]
# PLAYGROUND_USER_CONTENT_HOST = backends.get_hostname('user-content-backend')
PLAYGROUND_USER_CONTENT_HOST = None
MIMIC_HOST = backends.get_hostname('exec-code-backend')
else:
PLAYGROUND_HOSTS = ['{}.appspot.com'.format(appids.PLAYGROUND_APP_ID)]
if appids.PLAYGROUND_APP_ID_ALIAS:
PLAYGROUND_HOSTS.append('{}.appspot.com'
.format(appids.PLAYGROUND_APP_ID_ALIAS))
# PLAYGROUND_USER_CONTENT_HOST = ('{0}-dot-{1}.appspot.com'
# .format(USER_CONTENT_PREFIX,
# appids.PLAYGROUND_APP_ID))
PLAYGROUND_USER_CONTENT_HOST = None
MIMIC_HOST = '{0}.appspot.com'.format(appids.MIMIC_APP_ID)
| """Module containing global playground constants and functions."""
import os
from google.appengine.api import app_identity
from google.appengine.api import backends
import appids
import secret
DEBUG = True
# user content hostname prefix
USER_CONTENT_PREFIX = 'user-content'
# RFC1113 formatted 'Expires' to prevent HTTP/1.0 caching
LONG_AGO = 'Mon, 01 Jan 1990 00:00:00 GMT'
# 10 minutes
TEMPLATE_MEMCACHE_TIME = 3600
# owners of template projects
PUBLIC_PROJECT_TEMPLATE_OWNER = 'PUBLIC_TEMPLATE'
MANUAL_PROJECT_TEMPLATE_OWNER = 'MANUAL_TEMPLATE'
PROJECT_TEMPLATE_OWNERS = [
PUBLIC_PROJECT_TEMPLATE_OWNER,
MANUAL_PROJECT_TEMPLATE_OWNER
]
# whether or not we're running in the dev_appserver
_DEV_MODE = os.environ['SERVER_SOFTWARE'].startswith('Development/')
# namespace for playground specific data
PLAYGROUND_NAMESPACE = '_playground'
# template projects location
TEMPLATE_PROJECT_DIR = 'repos/'
# project access_key query parameter name
ACCESS_KEY_SET_COOKIE_PARAM_NAME = 'set_access_key_cookie'
ACCESS_KEY_HTTP_HEADER = 'X-Cloud-Playground-Access-Key'
ACCESS_KEY_COOKIE_NAME = 'access_key'
ACCESS_KEY_COOKIE_ARGS = {
'httponly': True,
'secure': not _DEV_MODE,
}
# name for the session cookie
SESSION_COOKIE_NAME = 'session'
SESSION_COOKIE_ARGS = {
'httponly': True,
'secure': not _DEV_MODE,
}
XSRF_COOKIE_ARGS = {
'httponly': False,
'secure': not _DEV_MODE,
}
WSGI_CONFIG = {
'webapp2_extras.sessions': {
'secret_key': secret.GetSecret('webapp2_extras.sessions', entropy=128),
'cookie_args': SESSION_COOKIE_ARGS,
}
}
# One hour
MIN_EXPIRATION_SECONDS = 3600
# One week
DEFAULT_EXPIRATION_SECONDS = 604800
# Extensions to exclude when creating template projects
SKIP_EXTENSIONS = ('swp', 'pyc', 'svn')
if _DEV_MODE:
PLAYGROUND_HOSTS = ('localhost:8080', '127.0.0.1:8080',
# port 7070 for karma e2e test
'localhost:7070', '127.0.0.1:7070',
app_identity.get_default_version_hostname())
# PLAYGROUND_USER_CONTENT_HOST = backends.get_hostname('user-content-backend')
PLAYGROUND_USER_CONTENT_HOST = None
MIMIC_HOST = backends.get_hostname('exec-code-backend')
else:
PLAYGROUND_HOSTS = ('{}.appspot.com'.format(appids.PLAYGROUND_APP_ID),
'{}.appspot.com'.format(appids.PLAYGROUND_APP_ID_ALIAS))
# PLAYGROUND_USER_CONTENT_HOST = ('{0}-dot-{1}.appspot.com'
# .format(USER_CONTENT_PREFIX,
# appids.PLAYGROUND_APP_ID))
PLAYGROUND_USER_CONTENT_HOST = None
MIMIC_HOST = '{0}.appspot.com'.format(appids.MIMIC_APP_ID)
| Python | 0.000005 |
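The fix above swaps the PLAYGROUND_HOSTS tuples for lists so the alias hostname can be appended only when PLAYGROUND_APP_ID_ALIAS is actually configured; the old tuple literal always formatted the alias, producing a bogus 'None.appspot.com' entry when it was unset. The pattern in isolation, with assumed example values:

PLAYGROUND_APP_ID = 'my-playground'  # assumed example value
PLAYGROUND_APP_ID_ALIAS = None       # unset, as in the bug being fixed

PLAYGROUND_HOSTS = ['{}.appspot.com'.format(PLAYGROUND_APP_ID)]
if PLAYGROUND_APP_ID_ALIAS:
    PLAYGROUND_HOSTS.append('{}.appspot.com'.format(PLAYGROUND_APP_ID_ALIAS))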
08edcd99f379962cbd761c743a727e86095b7a48 | Convert to list in case it is not | setup.in.py | setup.in.py | #
# Copyright 2012-2019 CNRS-UM LIRMM, CNRS-AIST JRL
#
from __future__ import print_function
try:
from setuptools import setup
from setuptools import Extension
except ImportError:
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
import hashlib
import numpy
import os
import subprocess
import sys
win32_build = os.name == 'nt'
this_path = os.path.dirname(os.path.realpath(__file__))
with open(this_path + '/sch/__init__.py', 'w') as fd:
fd.write('from .sch import *\n')
sha512 = hashlib.sha512()
src_files = ['sch/c_sch_private.pxd', 'sch/sch.pyx', 'sch/c_sch.pxd', 'sch/sch.pxd', 'include/sch_wrapper.hpp']
src_files = [ '{}/{}'.format(this_path, f) for f in src_files ]
for f in src_files:
chunk = 2**16
with open(f, 'r') as fd:
while True:
data = fd.read(chunk)
if data:
sha512.update(data.encode('ascii'))
else:
break
version_hash = sha512.hexdigest()[:7]
class pkg_config(object):
def __init__(self):
self.compile_args = [ '-D' + x for x in '@COMPILE_DEFINITIONS@'.split(';') if len(x) ]
self.compile_args += ['-std=c++11']
if win32_build:
self.compile_args.append('-DWIN32')
self.include_dirs = [ x for x in '$<TARGET_PROPERTY:SpaceVecAlg::SpaceVecAlg,INTERFACE_INCLUDE_DIRECTORIES>;$<TARGET_PROPERTY:sch-core::sch-core,INCLUDE_DIRECTORIES>'.split(';') if len(x) ]
self.include_dirs.append('@Boost_INCLUDE_DIR@')
self.include_dirs.append(this_path + '/include')
self.include_dirs = filter(len, self.include_dirs)
self.library_dirs = [ x for x in '$<TARGET_PROPERTY:sch-core::sch-core,LINK_FLAGS>'.split(';') if len(x) ]
location = '$<TARGET_PROPERTY:sch-core::sch-core,LOCATION_$<CONFIGURATION>>'
self.library_dirs.append(os.path.dirname(location) + "/../lib/")
if "$<CONFIGURATION>".lower() == "debug":
self.libraries = ['sch-core_d']
else:
self.libraries = ['sch-core']
configs = pkg_config()
def GenExtension(name, pkg, ):
pyx_src = name.replace('.', '/')
cpp_src = pyx_src + '.cpp'
pyx_src = pyx_src + '.pyx'
ext_src = pyx_src
pkg.include_dirs=list(pkg.include_dirs)
return Extension(name, [ext_src], extra_compile_args = pkg.compile_args, include_dirs = pkg.include_dirs + [numpy.get_include()], library_dirs = pkg.library_dirs, libraries = pkg.libraries)
extensions = [
GenExtension('sch.sch', configs)
]
extensions = [ x for x in extensions if x is not None ]
packages = ['sch']
data = ['__init__.py', 'c_sch.pxd', 'sch.pxd']
cython_packages = [ x for x in packages if any([ext.name.startswith(x) for ext in extensions]) ]
extensions = cythonize(extensions)
setup(
name = 'sch',
version='@PROJECT_VERSION@-{}'.format(version_hash),
ext_modules = extensions,
packages = packages,
package_data = { 'sch': data }
)
| #
# Copyright 2012-2019 CNRS-UM LIRMM, CNRS-AIST JRL
#
from __future__ import print_function
try:
from setuptools import setup
from setuptools import Extension
except ImportError:
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
import hashlib
import numpy
import os
import subprocess
import sys
win32_build = os.name == 'nt'
this_path = os.path.dirname(os.path.realpath(__file__))
with open(this_path + '/sch/__init__.py', 'w') as fd:
fd.write('from .sch import *\n')
sha512 = hashlib.sha512()
src_files = ['sch/c_sch_private.pxd', 'sch/sch.pyx', 'sch/c_sch.pxd', 'sch/sch.pxd', 'include/sch_wrapper.hpp']
src_files = [ '{}/{}'.format(this_path, f) for f in src_files ]
for f in src_files:
chunk = 2**16
with open(f, 'r') as fd:
while True:
data = fd.read(chunk)
if data:
sha512.update(data.encode('ascii'))
else:
break
version_hash = sha512.hexdigest()[:7]
class pkg_config(object):
def __init__(self):
self.compile_args = [ '-D' + x for x in '@COMPILE_DEFINITIONS@'.split(';') if len(x) ]
self.compile_args += ['-std=c++11']
if win32_build:
self.compile_args.append('-DWIN32')
self.include_dirs = [ x for x in '$<TARGET_PROPERTY:SpaceVecAlg::SpaceVecAlg,INTERFACE_INCLUDE_DIRECTORIES>;$<TARGET_PROPERTY:sch-core::sch-core,INCLUDE_DIRECTORIES>'.split(';') if len(x) ]
self.include_dirs.append('@Boost_INCLUDE_DIR@')
self.include_dirs.append(this_path + '/include')
self.include_dirs = filter(len, self.include_dirs)
self.library_dirs = [ x for x in '$<TARGET_PROPERTY:sch-core::sch-core,LINK_FLAGS>'.split(';') if len(x) ]
location = '$<TARGET_PROPERTY:sch-core::sch-core,LOCATION_$<CONFIGURATION>>'
self.library_dirs.append(os.path.dirname(location) + "/../lib/")
if "$<CONFIGURATION>".lower() == "debug":
self.libraries = ['sch-core_d']
else:
self.libraries = ['sch-core']
configs = pkg_config()
def GenExtension(name, pkg, ):
pyx_src = name.replace('.', '/')
cpp_src = pyx_src + '.cpp'
pyx_src = pyx_src + '.pyx'
ext_src = pyx_src
return Extension(name, [ext_src], extra_compile_args = pkg.compile_args, include_dirs = pkg.include_dirs + [numpy.get_include()], library_dirs = pkg.library_dirs, libraries = pkg.libraries)
extensions = [
GenExtension('sch.sch', configs)
]
extensions = [ x for x in extensions if x is not None ]
packages = ['sch']
data = ['__init__.py', 'c_sch.pxd', 'sch.pxd']
cython_packages = [ x for x in packages if any([ext.name.startswith(x) for ext in extensions]) ]
extensions = cythonize(extensions)
setup(
name = 'sch',
version='@PROJECT_VERSION@-{}'.format(version_hash),
ext_modules = extensions,
packages = packages,
package_data = { 'sch': data }
)
| Python | 1 |
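The one-line guard above (pkg.include_dirs = list(pkg.include_dirs)) matters on Python 3, where filter() returns a lazy iterator rather than a list, so concatenating it with [numpy.get_include()] inside GenExtension raises TypeError. A minimal reproduction of the failure and the fix:

include_dirs = filter(len, ['include', '', '/usr/include'])

try:
    include_dirs + ['/opt/include']      # TypeError on Python 3
except TypeError:
    include_dirs = list(include_dirs)    # materialise before concatenating

print(include_dirs + ['/opt/include'])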
a12d61e9a9b85c436d5b21b39862497b7e3ed903 | update tpp.py for gen3 | tpp.py | tpp.py | #This script will show updates to the Twitch Plays Pokemon live feed on reddit.
#You can only show important updates by passing the --important flag when you run the script
#This could be easily adapted for other live feeds (or totally generic) but for now
#it is hardcoded for the TPP feed.
#python-requests is required to run this.
#Install using:
#pip install requests
from __future__ import print_function
import requests
import time
import argparse
import sys
_parser = argparse.ArgumentParser(description="Live Twitch Plays Pokemon updates in your console.")
_parser.add_argument("--important", action="store_true")
_args = _parser.parse_args()
_api_url = "http://api.reddit.com/live/nawsz3vn7ui6hdsgciytwcxadi"
_headers = {"User-Agent": "TTPConsole/1.1 by sc00ty"}
_timeout = 60 #1 Minute
_last_id = ""
while True:
try:
#Request the JSON data for the live feed
payload = {"before": _last_id}
feed = requests.get(_api_url, params=payload, headers=_headers).json()
#Iterate backwards through the list, making it so items are shown chronologically
for feed_item in feed["data"]["children"][::-1]:
#Store the last seen id
_last_id = feed_item["data"]["name"]
body_text = feed_item["data"]["body"]
#If all text should be shown OR only important stuff, and this is important... show the update!
if not _args.important or ("**" in body_text and _args.important):
print("%s\n" % (body_text,))
#Progress towards next update.
for i in range (0, _timeout):
print("Checking for update in %ss.\r" % (_timeout - i), end="")
sys.stdout.flush()
time.sleep(1)
except KeyboardInterrupt:
break
except Exception:
print("Encountered an error while retrieving data. Exiting...")
| #This script will show updates to the Twitch Plays Pokemon live feed on reddit.
#You can only show important updates by passing the --important flag when you run the script
#This could be easily adapted for other live feeds (or totally generic) but for now
#it is hardcoded for the TPP feed.
#python-requests is required to run this.
#Install using:
#pip install requests
from __future__ import print_function
import requests
import time
import argparse
import sys
_parser = argparse.ArgumentParser(description="Live Twitch Plays Pokemon updates in your console.")
_parser.add_argument("--important", action="store_true")
_args = _parser.parse_args()
_api_url = "http://api.reddit.com/live/m5n42gvbpyi6hdisciytwamw3a"
_headers = {"User-Agent": "TTPConsole/1.1 by sc00ty"}
_timeout = 60 #1 Minute
_last_id = ""
while True:
try:
#Request the JSON data for the live feed
payload = {"before": _last_id}
feed = requests.get(_api_url, params=payload, headers=_headers).json()
#Iterate backwards through the list, making it so items are shown chronologically
for feed_item in feed["data"]["children"][::-1]:
#Store the last seen id
_last_id = feed_item["data"]["name"]
body_text = feed_item["data"]["body"]
#If all text should be shown OR only important stuff, and this is important... show the update!
if not _args.important or ("**" in body_text and _args.important):
print("%s\n" % (body_text,))
#Progress towards next update.
for i in range (0, _timeout):
print("Checking for update in %ss.\r" % (_timeout - i), end="")
sys.stdout.flush()
time.sleep(1)
except KeyboardInterrupt:
break
except Exception:
print("Encountered an error while retrieving data. Exiting...")
| Python | 0 |
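The countdown in the script above redraws a single console line by ending print with a carriage return instead of a newline and flushing stdout so the text appears immediately. The idiom in isolation, Python 2 compatible via the same __future__ import the script uses:

from __future__ import print_function
import sys
import time

for remaining in range(5, 0, -1):
    print("Checking for update in %ss.\r" % remaining, end="")
    sys.stdout.flush()  # without this the text can sit in the buffer
    time.sleep(1)
print()                 # move off the status line when finished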
c81eb510c72511c1f692f02f7bb63ef4caa51d27 | Add management functions for migration | apps/concept/management/commands/update_concept_totals.py | apps/concept/management/commands/update_concept_totals.py | from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from concept.models import Concept
class Command(BaseCommand):
args = ""
help = "Update concept total_question counts (post db import)"
def handle(self, *args, **options):
for concept in Concept.objects.all():
concept.total_questions = concept.question_set.count()
concept.save()
| from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from education.models import Concept
class Command(BaseCommand):
args = ""
help = "Update concept total_question counts (post db import)"
def handle(self, *args, **options):
for concept in Concept.objects.all():
concept.total_questions = concept.question_set.count()
concept.save()
| Python | 0 |
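The command above issues one COUNT query per concept. On a large table the same backfill can be expressed with a single annotated queryset; a hedged sketch, assuming a Question model holds the foreign key that produces the question_set relation used in the command:

from django.db.models import Count

from concept.models import Concept

def backfill_totals():
    # one query for all the counts, then a save per row
    for concept in Concept.objects.annotate(n=Count('question')):
        concept.total_questions = concept.n
        concept.save()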
6a39a514ae82f412c107dd87944cdb17b6a9d036 | remove isinstance assert in test_remove_site_packages_64bit | tests/test_server32_remove_site_packages.py | tests/test_server32_remove_site_packages.py | import os
import sys
try:
import pytest
except ImportError: # the 32-bit server does not need pytest installed
class Mark(object):
@staticmethod
def skipif(condition, reason=None):
def func(function):
return function
return func
class pytest(object):
mark = Mark
from msl.loadlib import Server32, Client64, IS_MAC
from msl.examples.loadlib import EXAMPLES_DIR
class Site32(Server32):
def __init__(self, host, port, **kwargs):
super(Site32, self).__init__(
os.path.join(kwargs['ex_dir'], 'cpp_lib32'),
'cdll', host, port
)
def remove(self):
return self.remove_site_packages_64bit()
@staticmethod
def contains(path):
return path in sys.path
class Site64(Client64):
def __init__(self):
super(Site64, self).__init__(__file__, ex_dir=EXAMPLES_DIR)
def remove(self):
return self.request32('remove')
def contains(self, path):
return self.request32('contains', path)
@pytest.mark.skipif(IS_MAC, reason='the 32-bit server for macOS does not exist')
def test_remove_site_packages_64bit():
s = Site64()
path = s.remove()
assert path in sys.path
assert not s.contains(path)
| import os
import sys
try:
import pytest
except ImportError: # the 32-bit server does not need pytest installed
class Mark(object):
@staticmethod
def skipif(condition, reason=None):
def func(function):
return function
return func
class pytest(object):
mark = Mark
from msl.loadlib import Server32, Client64, IS_MAC
from msl.examples.loadlib import EXAMPLES_DIR
class Site32(Server32):
def __init__(self, host, port, **kwargs):
super(Site32, self).__init__(
os.path.join(kwargs['ex_dir'], 'cpp_lib32'),
'cdll', host, port
)
def remove(self):
return self.remove_site_packages_64bit()
@staticmethod
def contains(path):
return path in sys.path
class Site64(Client64):
def __init__(self):
super(Site64, self).__init__(__file__, ex_dir=EXAMPLES_DIR)
def remove(self):
return self.request32('remove')
def contains(self, path):
return self.request32('contains', path)
@pytest.mark.skipif(IS_MAC, reason='the 32-bit server for macOS does not exist')
def test_remove_site_packages_64bit():
s = Site64()
path = s.remove()
assert isinstance(path, str)
assert path in sys.path
assert not s.contains(path)
| Python | 0.000007 |
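The try/except at the top of the test module gives the 32-bit server process, which has no pytest installed, a no-op stand-in for pytest.mark.skipif, so the same file can be imported by both the test runner and the server. The pattern in isolation:

try:
    import pytest
except ImportError:
    class _Mark(object):
        @staticmethod
        def skipif(condition, reason=None):
            def decorator(function):
                return function   # no-op: keep the function unchanged
            return decorator

    class pytest(object):
        mark = _Mark

@pytest.mark.skipif(False, reason='never skipped')
def test_always_runs():
    assert True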
7d47ab3aed3fb1c591966fe1a84e7c5f8d4ce909 | Print usage if only an optional option sent | qos.py | qos.py | #!/usr/bin/python
# Author: Anthony Ruhier
# Set QoS rules
import os
import subprocess
import argparse
import sys
import logging
try:
from config import DEBUG
except ImportError:
DEBUG = False
import tools
def run_as_root():
"""
Restart the script as root
"""
# Need to be root
if os.geteuid() != 0:
print("You need to be root to run this script. Relaunching with "
"sudo...\n")
subprocess.call(["sudo", sys.executable] + sys.argv)
exit()
def apply_qos():
run_as_root()
# Clean old rules
reset_qos()
# Setting new rules
print("Setting new rules")
setup_qos()
def reset_qos():
run_as_root()
print("Removing tc rules")
ifnames = get_ifnames()
tools.qdisc_del(ifnames, "htb", stderr=subprocess.DEVNULL)
return
def show_qos():
ifnames = get_ifnames()
print("\n\t QDiscs details\n\t================\n")
tools.qdisc_show(ifnames, "details")
print("\n\t QDiscs stats\n\t==============\n")
tools.qdisc_show(ifnames, "details")
def set_debug(level):
if level or DEBUG:
log_level = logging.DEBUG
else:
log_level = logging.WARNING
logging.basicConfig(
stream=sys.stderr,
format="[%(levelname)s] %(message)s (%(filename)s:%(lineno)d) ",
level=log_level
)
if __name__ == '__main__':
# Set all arguments possible for this script
parser = argparse.ArgumentParser(
description="Script to set, show or delete QoS rules with TC"
)
# Start/Stop/Show command
sp_action = parser.add_subparsers()
sp_start = sp_action.add_parser("start", help="set QoS rules")
sp_stop = sp_action.add_parser("stop", help="Remove all QoS rules")
sp_show = sp_action.add_parser("show", help="Show QoS rules")
# Set function to call for each options
sp_start.set_defaults(func=apply_qos)
sp_stop.set_defaults(func=reset_qos)
sp_show.set_defaults(func=show_qos)
# Debug option
parser.add_argument('-d', '--debug', help="Set the debug level",
dest="debug", action="store_true")
# Different ways to create QoS
parser_group = parser.add_mutually_exclusive_group()
# Use class rules
parser_group.add_argument('-c', '--class',
help="Use class rules (default)",
dest="class", action="store_true")
# Use tree rules
parser_group.add_argument('-t', '--tree', help="Use tree rules",
dest="tree", action="store_true")
# If no argument provided show help
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
# Parse argument
args = parser.parse_args()
# Set debug mode
set_debug(args.debug)
if args.tree:
from rules_parser import setup_qos, get_ifnames
else:
from class_parser import setup_qos, get_ifnames
# Execute correct function, or print usage
try:
args.func()
except AttributeError:
parser.print_usage()
sys.exit(1)
| #!/usr/bin/python
# Author: Anthony Ruhier
# Set QoS rules
import os
import subprocess
import argparse
import sys
import logging
try:
from config import DEBUG
except ImportError:
DEBUG = False
import tools
def run_as_root():
"""
Restart the script as root
"""
# Need to be root
if os.geteuid() != 0:
print("You need to be root to run this script. Relaunching with "
"sudo...\n")
subprocess.call(["sudo", sys.executable] + sys.argv)
exit()
def apply_qos():
run_as_root()
# Clean old rules
reset_qos()
# Setting new rules
print("Setting new rules")
setup_qos()
def reset_qos():
run_as_root()
print("Removing tc rules")
ifnames = get_ifnames()
tools.qdisc_del(ifnames, "htb", stderr=subprocess.DEVNULL)
return
def show_qos():
ifnames = get_ifnames()
print("\n\t QDiscs details\n\t================\n")
tools.qdisc_show(ifnames, "details")
print("\n\t QDiscs stats\n\t==============\n")
tools.qdisc_show(ifnames, "details")
def set_debug(level):
if level or DEBUG:
log_level = logging.DEBUG
else:
log_level = logging.WARNING
logging.basicConfig(
stream=sys.stderr,
format="[%(levelname)s] %(message)s (%(filename)s:%(lineno)d) ",
level=log_level
)
if __name__ == '__main__':
# Set all arguments possible for this script
parser = argparse.ArgumentParser(
description="Script to set, show or delete QoS rules with TC"
)
# Start/Stop/Show command
sp_action = parser.add_subparsers()
sp_start = sp_action.add_parser("start", help="set QoS rules")
sp_stop = sp_action.add_parser("stop", help="Remove all QoS rules")
sp_show = sp_action.add_parser("show", help="Show QoS rules")
# Set function to call for each options
sp_start.set_defaults(func=apply_qos)
sp_stop.set_defaults(func=reset_qos)
sp_show.set_defaults(func=show_qos)
# Debug option
parser.add_argument('-d', '--debug', help="Set the debug level",
dest="debug", action="store_true")
# Different ways to create QoS
parser_group = parser.add_mutually_exclusive_group()
# Use class rules
parser_group.add_argument('-c', '--class',
help="Use class rules (default)",
dest="class", action="store_true")
# Use tree rules
parser_group.add_argument('-t', '--tree', help="Use tree rules",
dest="tree", action="store_true")
# If no argument provided show help
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
# Parse argument
args = parser.parse_args()
# Set debug mode
set_debug(args.debug)
if args.tree:
from rules_parser import setup_qos, get_ifnames
else:
from class_parser import setup_qos, get_ifnames
# Execute correct function
args.func()
| Python | 0.000005 |
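The try/except around args.func() exists because set_defaults(func=...) only takes effect when a subcommand is parsed; invoking the script with just an optional flag such as -d leaves args without a func attribute. A minimal sketch of the failure mode and an equivalent guard, assuming Python 3 argparse where subcommands are optional by default:

import argparse
import sys

parser = argparse.ArgumentParser()
parser.add_argument('-d', '--debug', action='store_true')
subparsers = parser.add_subparsers()
start = subparsers.add_parser('start')
start.set_defaults(func=lambda: sys.stdout.write('starting\n'))

args = parser.parse_args(['-d'])   # optional flag only, no subcommand
if not hasattr(args, 'func'):      # equivalent to catching AttributeError
    parser.print_usage()
    sys.exit(1)
args.func()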
6fc065dc2f88c0c59037f5a6efa89738d963977e | Support XML-RPC marshalling of mx.DateTime. | rpc.py | rpc.py | import xmlrpclib
import traceback
from cStringIO import StringIO
allowed = ('package_releases', 'package_urls', 'package_data',
'search', 'list_packages', 'release_urls', 'release_data',
'updated_releases', 'changelog', 'post_cheesecake_for_release')
# monkey-patch xmlrpclib to marshal mx.DateTime correctly.
import mx.DateTime
def dump_DateTime(self, value, write):
write("<value><dateTime.iso8601>")
write(value.strftime("%Y%m%dT%H:%M:%S"))
write("</dateTime.iso8601></value>\n")
xmlrpclib.Marshaller.dispatch[mx.DateTime.DateTimeType] = dump_DateTime
def handle_request(webui_obj):
webui_obj.handler.send_response(200, 'OK')
webui_obj.handler.send_header('Content-type', 'text/xml')
webui_obj.handler.send_header('charset', 'UTF-8' );
webui_obj.handler.end_headers()
try:
methodArgs, methodName = xmlrpclib.loads(webui_obj.handler.rfile.read())
if methodName in allowed:
response = globals()[methodName](webui_obj.store, *methodArgs)
else:
raise KeyError, "Method %r does not exist" % (methodName,)
if response is None:
response = ''
# xmlrpclib.dumps encodes Unicode as UTF-8
xml = xmlrpclib.dumps((response,), methodresponse=True, allow_none=True)
webui_obj.handler.wfile.write(xml)
except:
out = StringIO()
traceback.print_exc(file=out)
result = xmlrpclib.dumps(xmlrpclib.Fault(1, out.getvalue()), methodresponse=True)
webui_obj.handler.wfile.write(result)
def list_packages(store):
result = store.get_packages()
return [row['name'] for row in result]
def package_releases(store, package_name, show_hidden=False):
if show_hidden:
hidden = None
else:
hidden = False
result = store.get_package_releases(package_name, hidden=hidden)
return [row['version'] for row in result]
def release_urls(store, package_name, version):
result = []
for file in store.list_files(package_name, version):
info = file.as_dict()
info['url'] = store.gen_file_url(info['python_version'],
package_name, info['filename'])
result.append(info)
# TODO do something with release_urls when there is something to do
#info = store.get_package(package_name, version)
#if info['download_url']:
# result.append({'url': info['download_url']})
return result
package_urls = release_urls # "deprecated"
def release_data(store, package_name, version):
info = store.get_package(package_name, version).as_dict()
del info['description_html']
for col in ('requires', 'provides', 'obsoletes'):
rows = store.get_release_relationships(package_name, version, col)
info[col] = [row['specifier'] for row in rows]
classifiers = [r[0] for r in store.get_release_classifiers(package_name,
version)]
info['classifiers' ] = classifiers
return info
package_data = release_data # "deprecated"
def search(store, spec, operator='and'):
spec['_pypi_hidden'] = 'FALSE'
return [row.as_dict() for row in store.query_packages(spec, operator)]
def updated_releases(store, since):
result = store.updated_releases(since)
return [(row['name'], row['version']) for row in result]
def changelog(store, since):
result = store.changelog(since)
return [(row['name'],row['version'],
int(row['submitted_date'].gmticks()),
row['action'])
for row in result]
def post_cheesecake_for_release(store, name, version, score_data, password):
if password != store.config.cheesecake_password:
        raise ValueError("Bad password.")
store.save_cheesecake_score(name, version, score_data)
store.commit()
| import xmlrpclib
import traceback
from cStringIO import StringIO
allowed = ('package_releases', 'package_urls', 'package_data',
'search', 'list_packages', 'release_urls', 'release_data',
'updated_releases', 'changelog', 'post_cheesecake_for_release')
def handle_request(webui_obj):
webui_obj.handler.send_response(200, 'OK')
webui_obj.handler.send_header('Content-type', 'text/xml')
webui_obj.handler.send_header('charset', 'UTF-8' );
webui_obj.handler.end_headers()
try:
methodArgs, methodName = xmlrpclib.loads(webui_obj.handler.rfile.read())
if methodName in allowed:
response = globals()[methodName](webui_obj.store, *methodArgs)
else:
raise KeyError, "Method %r does not exist" % (methodName,)
if response is None:
response = ''
# xmlrpclib.dumps encodes Unicode as UTF-8
xml = xmlrpclib.dumps((response,), methodresponse=True, allow_none=True)
webui_obj.handler.wfile.write(xml)
except:
out = StringIO()
traceback.print_exc(file=out)
result = xmlrpclib.dumps(xmlrpclib.Fault(1, out.getvalue()), methodresponse=True)
webui_obj.handler.wfile.write(result)
def list_packages(store):
result = store.get_packages()
return [row['name'] for row in result]
def package_releases(store, package_name, show_hidden=False):
if show_hidden:
hidden = None
else:
hidden = False
result = store.get_package_releases(package_name, hidden=hidden)
return [row['version'] for row in result]
def release_urls(store, package_name, version):
result = []
for file in store.list_files(package_name, version):
info = file.as_dict()
info['url'] = store.gen_file_url(info['python_version'],
package_name, info['filename'])
result.append(info)
# TODO do something with release_urls when there is something to do
#info = store.get_package(package_name, version)
#if info['download_url']:
# result.append({'url': info['download_url']})
return result
package_urls = release_urls # "deprecated"
def release_data(store, package_name, version):
info = store.get_package(package_name, version).as_dict()
del info['description_html']
for col in ('requires', 'provides', 'obsoletes'):
rows = store.get_release_relationships(package_name, version, col)
info[col] = [row['specifier'] for row in rows]
classifiers = [r[0] for r in store.get_release_classifiers(package_name,
version)]
info['classifiers' ] = classifiers
return info
package_data = release_data # "deprecated"
def search(store, spec, operator='and'):
spec['_pypi_hidden'] = 'FALSE'
return [row.as_dict() for row in store.query_packages(spec, operator)]
def updated_releases(store, since):
result = store.updated_releases(since)
return [(row['name'], row['version']) for row in result]
def changelog(store, since):
result = store.changelog(since)
return [(row['name'],row['version'],
int(row['submitted_date'].gmticks()),
row['action'])
for row in result]
def post_cheesecake_for_release(store, name, version, score_data, password):
if password != store.config.cheesecake_password:
        raise ValueError("Bad password.")
store.save_cheesecake_score(name, version, score_data)
store.commit()
| Python | 0 |
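The module above extends xmlrpclib by inserting a serialiser into Marshaller.dispatch, which is keyed by the value's type; that dispatch table is the extension point the 'monkey-patch' comment refers to. A hedged sketch of the same technique for another type, written for Python 2 to match the module:

import decimal
import xmlrpclib

def dump_decimal(self, value, write):
    # serialise Decimal as an XML-RPC double
    write("<value><double>")
    write(str(value))
    write("</double></value>\n")

xmlrpclib.Marshaller.dispatch[decimal.Decimal] = dump_decimal

print xmlrpclib.dumps((decimal.Decimal("1.25"),))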
79ef0fe21b136b80889a8e6e06339074ac73a1f1 | Comment out section | run.py | run.py | __author__ = 'matt'
# import datetime
import blockbuster
# blockbuster.app.debug = blockbuster.config.debug_mode
#
# blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
# blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.__version__ + " "
# "@@@@@@@@@@@@@@@@@@")
# blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
# blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====")
# blockbuster.bb_logging.logger.info(
# '================Time restriction disabled================') \
# if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info(
# '================Time restriction enabled================')
#
# blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
#
# if blockbuster.config.debug_mode:
# blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
# This section only applies when you are running run.py directly
if __name__ == '__main__':
blockbuster.bb_logging.logger.info("Running http on port 5000")
blockbuster.app.run(host='0.0.0.0', debug=True) | __author__ = 'matt'
import datetime
import blockbuster
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.__version__ + " "
"@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====")
blockbuster.bb_logging.logger.info(
'================Time restriction disabled================') \
if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info(
'================Time restriction enabled================')
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
if blockbuster.config.debug_mode:
blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
# This section only applies when you are running run.py directly
if __name__ == '__main__':
blockbuster.bb_logging.logger.info("Running http on port 5000")
blockbuster.app.run(host='0.0.0.0', debug=True) | Python | 0 |
35028b84d4757e1343a97da653670db049ac5e8d | replace default handler with static handler | web.py | web.py | #!/usr/bin/env python2
import tornado
import log
from tornado import web, httpserver
_http_server = None
_https_server = None
_html_root = './'
_log = None
# TODO: SSL needs this
# ssl_options['certfile'] - server certificate
# ssl_options['keyfile'] - server key
# ssl_options['ca_certs'] - CA certificate
def run_server(ssl_options = {}, http_port = 80, https_port = 443, log_facility = None, html_root = './'):
global _http_server
global _https_server
global _log
# list handlers for REST calls here
handlers = []
if log_facility:
_log = log_facility
else:
_log = log.TrivialLogger()
handlers.append(('/(.*)', web.StaticFileHandler, {'path': html_root}))
app = tornado.web.Application(handlers)
_log.info("creating servers")
_http_server = tornado.httpserver.HTTPServer(app, no_keep_alive = False)
_https_server = tornado.httpserver.HTTPServer(app, no_keep_alive = False, ssl_options = ssl_options)
_log.info("setting up TCP ports")
_http_server.listen(http_port)
_https_server.listen(https_port)
_log.info("starting server loop")
tornado.ioloop.IOLoop.instance().start()
_log.info("server loop exited")
| #!/usr/bin/env python2
import tornado
import log
import magic
from tornado import web, httpserver
_http_server = None
_https_server = None
_html_root = './'
_log = None
_magic = None
class DefaultHandler(tornado.web.RequestHandler):
def get(self, match):
_log.info("incoming request: {}".format(self.request))
_log.info("matched default match: {}".format(match))
self.set_header("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0")
self.set_header("Connection", "close")
if match:
fname = _html_root + '/' + match
else:
fname = _html_root + '/index.html'
_log.info("fname: {}".format(fname))
try:
with open(fname, 'rb') as fd:
content = fd.read()
mime_type = _magic.file(fname)
self.set_header("Content-type", mime_type)
self.finish(content)
except:
self.set_status(404)
self.finish("Not found: {}".format(match))
_app = tornado.web.Application([
('/(.*)', DefaultHandler)
])
# TODO: SSL needs this
# ssl_options['certfile'] - server certificate
# ssl_options['keyfile'] - server key
# ssl_options['ca_certs'] - CA certificate
def run_server(ssl_options = {}, http_port = 80, https_port = 443, log_facility = None, html_root = './'):
global _http_server
global _https_server
global _log
global _magic
global _html_root
# http://www.zak.co.il/tddpirate/2013/03/03/the-python-module-for-file-type-identification-called-magic-is-not-standardized/
try:
_magic = magic.open(magic.MAGIC_MIME_TYPE)
_magic.load()
except AttributeError,e:
_magic = magic.Magic(mime=True)
_magic.file = _magic.from_file
if log_facility:
_log = log_facility
else:
_log = log.TrivialLogger()
_html_root = html_root
_log.info("creating servers")
_http_server = tornado.httpserver.HTTPServer(_app, no_keep_alive = False)
_https_server = tornado.httpserver.HTTPServer(_app, no_keep_alive = False, ssl_options = ssl_options)
_log.info("setting up TCP ports")
_http_server.listen(http_port)
_https_server.listen(https_port)
_log.info("starting server loop")
tornado.ioloop.IOLoop.instance().start()
_log.info("server loop exited")
| Python | 0 |
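The replacement above hands static serving to tornado's built-in StaticFileHandler. The index-page case the old DefaultHandler handled manually can be covered the same way: passing default_filename makes a request for a directory, including '/', fall back to that file. A minimal sketch:

import tornado.web

app = tornado.web.Application([
    ('/(.*)', tornado.web.StaticFileHandler,
     {'path': './html', 'default_filename': 'index.html'}),
])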
d3f03d6e2cf48929f8233e52720b07242ccd64da | Put tweets back | web.py | web.py | """ Heroku/Python Quickstart: https://blog.heroku.com/archives/2011/9/28/python_and_django"""
import os
import random
import requests
from flask import Flask
import tweepy
import settings
app = Flask(__name__)
@app.route('/')
def home_page():
return 'Hello from the SPARK learn-a-thon!'
def get_instagram_image():
instagram_api_url = 'https://api.instagram.com/v1/tags/spark/media/recent?client_id={}'.format(settings.CLIENT_ID)
data = requests.get(instagram_api_url).json()['data']
number_of_images = choose_number_of_images()
images_returned = []
for image in number_of_images:
images_returned.append(random.choice(data)['images']['low_resolution']['url'])
return images_returned
def get_tweets():
auth = tweepy.OAuthHandler(settings.CONSUMER_KEY, settings.CONSUMER_SECRET)
auth.set_access_token(settings.ACCESS_KEY, settings.ACCESS_SECRET)
api = tweepy.API(auth)
number_of_tweets = choose_number_of_tweets()
tweets = tweepy.Cursor(api.search, q='#spark')
return tweets.items(limit=number_of_tweets)
def choose_number_of_images():
number = 3
return number
def choose_number_of_tweets():
number = 3
return number
if __name__ == '__main__':
# port = int(os.environ.get("PORT", 5000))
# app.run(host='0.0.0.0', port=port)
| """ Heroku/Python Quickstart: https://blog.heroku.com/archives/2011/9/28/python_and_django"""
import os
import random
import requests
from flask import Flask
import tweepy
import settings
app = Flask(__name__)
@app.route('/')
def home_page():
return 'Hello from the SPARK learn-a-thon!'
def get_instagram_image():
instagram_api_url = 'https://api.instagram.com/v1/tags/spark/media/recent?client_id={}'.format(settings.CLIENT_ID)
data = requests.get(instagram_api_url).json()['data']
number_of_images = choose_number_of_images()
images_returned = []
for image in number_of_images:
images_returned.append(random.choice(data)['images']['low_resolution']['url'])
return images_returned
def get_tweets():
auth = tweepy.OAuthHandler(settings.CONSUMER_KEY, settings.CONSUMER_SECRET)
auth.set_access_token(settings.ACCESS_KEY, settings.ACCESS_SECRET)
api = tweepy.API(auth)
number_of_tweets = choose_number_of_tweets()
return tweets.items(limit=number_of_tweets)
def choose_number_of_images():
number = 3
return number
def choose_number_of_tweets():
number = 3
return number
if __name__ == '__main__':
# port = int(os.environ.get("PORT", 5000))
# app.run(host='0.0.0.0', port=port)
| Python | 0 |
c772951ffbe06be23ff56d0281b78d7b9eac456b | Add option to generate executable name from the current branch | pycket/entry_point.py | pycket/entry_point.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
from pycket.expand import load_json_ast_rpython, expand_to_ast, PermException
from pycket.interpreter import interpret_one, ToplevelEnv, interpret_module, GlobalConfig
from pycket.error import SchemeException
from pycket.option_helper import parse_args, ensure_json_ast
from pycket.values import W_String
from rpython.rlib import jit
# _____ Define and setup target ___
def entry_point(argv):
try:
return actual_entry(argv)
except SchemeException, e:
print "ERROR:", e.msg
raise # to see interpreter-level traceback
def actual_entry(argv):
jit.set_param(None, "trace_limit", 20000)
config, names, args, retval = parse_args(argv)
if retval != 0 or config is None:
return retval
args_w = [W_String(arg) for arg in args]
module_name, json_ast = ensure_json_ast(config, names)
if json_ast is None:
ast = expand_to_ast(module_name)
else:
ast = load_json_ast_rpython(json_ast)
GlobalConfig.load(ast)
env = ToplevelEnv()
env.commandline_arguments = args_w
env.module_env.add_module(module_name, ast)
val = interpret_module(ast, env)
return 0
def target(driver, args):
if "--with-branch" in args:
import subprocess
base_name = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"]).strip()
else:
base_name = 'pycket-'
if driver.config.translation.jit:
driver.exe_name = base_name + '-%(backend)s'
else:
driver.exe_name = base_name + '-%(backend)s-nojit'
return entry_point, None
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
from pycket.expand import load_json_ast_rpython, expand_to_ast, PermException
from pycket.interpreter import interpret_one, ToplevelEnv, interpret_module, GlobalConfig
from pycket.error import SchemeException
from pycket.option_helper import parse_args, ensure_json_ast
from pycket.values import W_String
from rpython.rlib import jit
# _____ Define and setup target ___
def entry_point(argv):
try:
return actual_entry(argv)
except SchemeException, e:
print "ERROR:", e.msg
raise # to see interpreter-level traceback
def actual_entry(argv):
jit.set_param(None, "trace_limit", 20000)
config, names, args, retval = parse_args(argv)
if retval != 0 or config is None:
return retval
args_w = [W_String(arg) for arg in args]
module_name, json_ast = ensure_json_ast(config, names)
if json_ast is None:
ast = expand_to_ast(module_name)
else:
ast = load_json_ast_rpython(json_ast)
GlobalConfig.load(ast)
env = ToplevelEnv()
env.commandline_arguments = args_w
env.module_env.add_module(module_name, ast)
val = interpret_module(ast, env)
return 0
def target(driver, args):
if driver.config.translation.jit:
driver.exe_name = 'pycket-%(backend)s'
else:
driver.exe_name = 'pycket-%(backend)s-nojit'
return entry_point, None
| Python | 0.000001 |
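The --with-branch option above derives the executable name from git by shelling out to rev-parse. The call in isolation; it assumes git is on PATH and the working directory is inside a repository, and on Python 3 the bytes output would also need decoding:

import subprocess

branch = subprocess.check_output(
    ["git", "rev-parse", "--abbrev-ref", "HEAD"]).strip()
print(branch)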
b5431978c51107ba9e6475fc489fbd6dc7110332 | clean up camera class | pymba/vimba_camera.py | pymba/vimba_camera.py | from ctypes import byref, sizeof
from typing import Optional
from .vimba_object import VimbaObject
from .vimba_exception import VimbaException
from .vimba_frame import VimbaFrame
from . import vimba_c
class VimbaCamera(VimbaObject):
"""
A Vimba camera object.
"""
def __init__(self, id_string: str):
self._id_string = id_string.encode()
super().__init__()
self._info = self._get_info()
@property
def id_string(self) -> str:
return self._id_string.decode()
def _get_info(self) -> vimba_c.VmbCameraInfo:
"""
Get info of the camera. Does not require the camera to be opened.
"""
vmb_camera_info = vimba_c.VmbCameraInfo()
error = vimba_c.vmb_camera_info_query(self._id_string,
byref(vmb_camera_info),
sizeof(vmb_camera_info))
if error:
raise VimbaException(error)
return vmb_camera_info
def open(self, camera_access_mode: Optional[int] = VimbaObject.VMB_ACCESS_MODE_FULL):
"""
Open the camera with requested access mode.
"""
error = vimba_c.vmb_camera_open(self._id_string,
camera_access_mode,
byref(self._handle))
if error:
raise VimbaException(error)
def close(self):
"""
Close the camera.
"""
error = vimba_c.vmb_camera_close(self._handle)
if error:
raise VimbaException(error)
def revoke_all_frames(self):
"""
Revoke all frames assigned to the camera.
"""
error = vimba_c.vmb_frame_revoke_all(self._handle)
if error:
raise VimbaException(error)
def start_capture(self):
"""
Prepare the API for incoming frames.
"""
error = vimba_c.vmb_capture_start(self._handle)
if error:
raise VimbaException(error)
def end_capture(self):
"""
Stop the API from being able to receive frames.
"""
error = vimba_c.vmb_capture_end(self._handle)
if error:
raise VimbaException(error)
def flush_capture_queue(self):
"""
Flush the capture queue.
"""
error = vimba_c.vmb_capture_queue_flush(self._handle)
if error:
raise VimbaException(error)
def create_frame(self) -> VimbaFrame:
"""
Creates and returns a new frame object. Multiple frames per camera can therefore be returned.
"""
return VimbaFrame(self)
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
from . import vimba_structure as structs
from .vimba_object import VimbaObject
from .vimba_exception import VimbaException
from .vimba_frame import VimbaFrame
from .vimba_dll import VimbaDLL
from ctypes import *
# camera features are automatically readable as object attributes.
class VimbaCamera(VimbaObject):
"""
A Vimba camera object. This class provides the minimal access
to Vimba functions required to control the camera.
"""
@property
def cameraIdString(self):
return self._cameraIdString.decode()
# own handle is inherited as self._handle
def __init__(self, cameraIdString):
# call super constructor
super(VimbaCamera, self).__init__()
# set ID
self._cameraIdString = cameraIdString.encode()
# set own info
self._info = self._getInfo()
def getInfo(self):
"""
Get info of the camera. Does not require
the camera to be opened.
:returns: VimbaCameraInfo object -- camera information.
"""
return self._info
def _getInfo(self):
"""
Get info of the camera. Does not require
the camera to be opened.
:returns: VimbaCameraInfo object -- camera information.
"""
# args for Vimba call
cameraInfo = structs.VimbaCameraInfo()
# Vimba DLL will return an error code
errorCode = VimbaDLL.cameraInfoQuery(self._cameraIdString,
byref(cameraInfo),
sizeof(cameraInfo))
if errorCode != 0:
raise VimbaException(errorCode)
return cameraInfo
def openCamera(self, cameraAccessMode=1):
"""
Open the camera with requested access mode
Available access modes:
0 : VmbAccessModeNone
1 : VmbAccessModeFull
2 : VmbAccessModeRead
3 : VmbAccessModeConfig
4 : VmbAccessModeLite
"""
# args for Vimba call
errorCode = VimbaDLL.cameraOpen(self._cameraIdString,
cameraAccessMode,
byref(self._handle))
if errorCode != 0:
raise VimbaException(errorCode)
def closeCamera(self):
"""
Close the camera.
"""
errorCode = VimbaDLL.cameraClose(self._handle)
if errorCode != 0:
raise VimbaException(errorCode)
def revokeAllFrames(self):
"""
Revoke all frames assigned to the camera.
"""
errorCode = VimbaDLL.frameRevokeAll(self._handle)
if errorCode != 0:
raise VimbaException(errorCode)
def startCapture(self):
"""
Prepare the API for incoming frames.
"""
errorCode = VimbaDLL.captureStart(self._handle)
if errorCode != 0:
raise VimbaException(errorCode)
def endCapture(self):
"""
Stop the API from being able to receive frames.
"""
errorCode = VimbaDLL.captureEnd(self._handle)
if errorCode != 0:
raise VimbaException(errorCode)
def flushCaptureQueue(self):
"""
Flush the capture queue.
"""
errorCode = VimbaDLL.captureQueueFlush(self._handle)
if errorCode != 0:
raise VimbaException(errorCode)
# method for easy frame creation
def getFrame(self):
"""
Creates and returns a new frame object. Multiple frames
per camera can therefore be returned.
:returns: VimbaFrame object -- the new frame.
"""
return VimbaFrame(self)
| Python | 0 |
17fe4613518def551e637764e644c5d58b1665d9 | Add BodeAnalyser instrument to instrument table | pymoku/instruments.py | pymoku/instruments.py | import sys
from . import _instrument
from . import _oscilloscope
from . import _waveform_generator
from . import _phasemeter
from . import _specan
from . import _lockinamp
from . import _datalogger
from . import _bodeanalyser
from . import _stream_instrument
from . import _frame_instrument
from . import _input_instrument
''' Preferred import point. Aggregates the separate instruments and helper classes
to flatten the import heirarchy (e.g. pymoku.instruments.Oscilloscope rather
than pymoku.instruments._oscilloscope.Oscilloscope)
'''
InstrumentData = _frame_instrument.InstrumentData
VoltsData = _oscilloscope.VoltsData
SpectrumData = _specan.SpectrumData
MokuInstrument = _instrument.MokuInstrument
Oscilloscope = _oscilloscope.Oscilloscope
WaveformGenerator = _waveform_generator.WaveformGenerator
Phasemeter = _phasemeter.Phasemeter
SpectrumAnalyser = _specan.SpectrumAnalyser
LockInAmp = _lockinamp.LockInAmp
Datalogger = _datalogger.Datalogger
BodeAnalyser = _bodeanalyser.BodeAnalyser
id_table = {
1: Oscilloscope,
2: SpectrumAnalyser,
3: Phasemeter,
4: WaveformGenerator,
5: None,
6: None,
7: Datalogger,
8: LockInAmp,
9: BodeAnalyser,
10: None,
11: None,
12: None,
13: None,
14: None
}
| import sys
from . import _instrument
from . import _oscilloscope
from . import _waveform_generator
from . import _phasemeter
from . import _specan
from . import _lockinamp
from . import _datalogger
from . import _bodeanalyser
from . import _stream_instrument
from . import _frame_instrument
from . import _input_instrument
''' Preferred import point. Aggregates the separate instruments and helper classes
to flatten the import heirarchy (e.g. pymoku.instruments.Oscilloscope rather
than pymoku.instruments._oscilloscope.Oscilloscope)
'''
InstrumentData = _frame_instrument.InstrumentData
VoltsData = _oscilloscope.VoltsData
SpectrumData = _specan.SpectrumData
MokuInstrument = _instrument.MokuInstrument
Oscilloscope = _oscilloscope.Oscilloscope
WaveformGenerator = _waveform_generator.WaveformGenerator
Phasemeter = _phasemeter.Phasemeter
SpectrumAnalyser = _specan.SpectrumAnalyser
LockInAmp = _lockinamp.LockInAmp
Datalogger = _datalogger.Datalogger
BodeAnalyser = _bodeanalyser.BodeAnalyser
id_table = {
1: Oscilloscope,
2: SpectrumAnalyser,
3: Phasemeter,
4: WaveformGenerator,
5: None,
6: None,
7: Datalogger,
8: LockInAmp,
9: None,
10: None,
11: None,
12: None,
13: None,
14: None
}
| Python | 0 |
5a506ff7005f09b14faa4d6036563e0125ca00f4 | Fix indent | pymystrom/__init__.py | pymystrom/__init__.py | """
Copyright (c) 2015-2017 Fabian Affolter <fabian@affolter-engineering.ch>
Licensed under MIT. All rights reserved.
"""
import requests
from . import exceptions
class MyStromPlug(object):
"""A class for a myStrom switch."""
def __init__(self, host):
"""Initialize the switch."""
self.resource = 'http://{}'.format(host)
self.timeout = 5
self.data = None
self.state = None
self.consumption = 0
def set_relay_on(self):
"""Turn the relay on."""
if not self.get_relay_state():
try:
request = requests.get(
'{}/relay'.format(self.resource), params={'state': '1'},
timeout=self.timeout)
if request.status_code == 200:
self.data['relay'] = True
except requests.exceptions.ConnectionError:
raise exceptions.MyStromConnectionError()
def set_relay_off(self):
"""Turn the relay off."""
if self.get_relay_state():
try:
request = requests.get(
'{}/relay'.format(self.resource), params={'state': '0'},
timeout=self.timeout)
if request.status_code == 200:
self.data['relay'] = False
except requests.exceptions.ConnectionError:
raise exceptions.MyStromConnectionError()
def get_status(self):
"""Get the details from the switch."""
try:
request = requests.get(
'{}/report'.format(self.resource), timeout=self.timeout)
self.data = request.json()
return self.data
except (requests.exceptions.ConnectionError, ValueError):
raise exceptions.MyStromConnectionError()
def get_relay_state(self):
"""Get the relay state."""
self.get_status()
try:
self.state = self.data['relay']
except TypeError:
self.state = False
return bool(self.state)
def get_consumption(self):
"""Get current power consumption in mWh."""
self.get_status()
try:
self.consumption = self.data['power']
except TypeError:
self.consumption = 0
return self.consumption
| """
Copyright (c) 2015-2017 Fabian Affolter <fabian@affolter-engineering.ch>
Licensed under MIT. All rights reserved.
"""
import requests
from . import exceptions
class MyStromPlug(object):
"""A class for a myStrom switch."""
def __init__(self, host):
"""Initialize the switch."""
self.resource = 'http://{}'.format(host)
self.timeout = 5
self.data = None
self.state = None
self.consumption = 0
def set_relay_on(self):
"""Turn the relay on."""
if not self.get_relay_state():
try:
request = requests.get('{}/relay'.format(self.resource),
params={'state': '1'},
timeout=self.timeout)
if request.status_code == 200:
self.data['relay'] = True
except requests.exceptions.ConnectionError:
raise exceptions.MyStromConnectionError()
def set_relay_off(self):
"""Turn the relay off."""
if self.get_relay_state():
try:
request = requests.get('{}/relay'.format(self.resource),
params={'state': '0'},
timeout=self.timeout)
if request.status_code == 200:
self.data['relay'] = False
except requests.exceptions.ConnectionError:
raise exceptions.MyStromConnectionError()
def get_status(self):
"""Get the details from the switch."""
try:
request = requests.get('{}/report'.format(self.resource),
timeout=self.timeout)
self.data = request.json()
return self.data
except (requests.exceptions.ConnectionError, ValueError):
raise exceptions.MyStromConnectionError()
def get_relay_state(self):
"""Get the relay state."""
self.get_status()
try:
self.state = self.data['relay']
except TypeError:
self.state = False
return bool(self.state)
def get_consumption(self):
"""Get current power consumption in mWh."""
self.get_status()
try:
self.consumption = self.data['power']
except TypeError:
self.consumption = 0
return self.consumption
| Python | 0.000854 |
c034282423d47a6530ed0bb77c54e133de72115b | add more verbose output to PushwooshClient when debut=True | pypushwoosh/client.py | pypushwoosh/client.py | import logging
import requests
from .base import PushwooshBaseClient
log = logging.getLogger('pypushwoosh.client.log')
class PushwooshClient(PushwooshBaseClient):
"""
Implementation of the Pushwoosh API Client.
"""
headers = {'User-Agent': 'PyPushwooshClient',
'Content-Type': 'application/json',
'Accept': 'application/json'}
def __init__(self, timeout=None):
PushwooshBaseClient.__init__(self)
self.timeout = timeout
def path(self, command):
return '{}://{}/'.format(self.scheme, self.hostname) + '/'.join((self.endpoint, self.version,
command.command_name))
def invoke(self, command):
PushwooshBaseClient.invoke(self, command)
url = self.path(command)
payload = command.render()
if self.debug:
log.debug('Client: %s' % self.__class__.__name__)
log.debug('Command: %s' % payload)
log.debug('Request URL: %s' % url)
log.debug('Request method: %s' % self.method)
log.debug('Request headers: %s' % self.headers)
r = requests.post(url, data=payload, headers=self.headers, timeout=self.timeout)
if self.debug:
log.debug('Response version: %s' % r.raw.version)
log.debug('Response code: %s' % r.status_code)
log.debug('Response phrase: %s' % r.reason)
log.debug('Response headers: %s' % r.headers)
log.debug('Response payload: %s' % r.json())
return r.json()
| import logging
import requests
from .base import PushwooshBaseClient
log = logging.getLogger('pypushwoosh.client.log')
class PushwooshClient(PushwooshBaseClient):
"""
Implementation of the Pushwoosh API Client.
"""
headers = {'User-Agent': 'PyPushwooshClient',
'Content-Type': 'application/json',
'Accept': 'application/json'}
def __init__(self, timeout=None):
PushwooshBaseClient.__init__(self)
self.timeout = timeout
def path(self, command):
return '{}://{}/'.format(self.scheme, self.hostname) + '/'.join((self.endpoint, self.version,
command.command_name))
def invoke(self, command):
PushwooshBaseClient.invoke(self, command)
if self.debug:
log.debug('Client: %s' % self.__class__.__name__)
log.debug('Command: %s' % command.render())
log.debug('Request URL: %s://%s%s' % (self.scheme, self.hostname, self.path(command)))
log.debug('Request method: %s' % self.method)
log.debug('Request headers: %s' % self.headers)
r = requests.post(self.path(command), data=command.render(), headers=self.headers, timeout=self.timeout)
if self.debug:
log.debug('Response version: %s' % r.raw.version)
log.debug('Response code: %s' % r.status_code)
log.debug('Response phrase: %s' % r.reason)
log.debug('Response headers: %s' % r.headers)
return r.json()
| Python | 0.003164 |
b7ed71cc0b620f460a0d24eeef7891e9770fc39e | Modify & Access time in pyls. | pysh/shell/builtin.py | pysh/shell/builtin.py | import collections
import csv
import datetime
import os
import pwd
import StringIO
from pysh.shell.pycmd import register_pycmd
from pysh.shell.pycmd import pycmd
from pysh.shell.pycmd import IOType
from pysh.shell.table import Table
def file_to_array(f):
return map(lambda line: line.rstrip('\r\n'), f.readlines())
class Permission(int):
def __init__(self, val):
int.__init__(self, val)
def __str__(self):
return ''.join((self.__to_rwx(self >> 6),
self.__to_rwx(self >> 3),
self.__to_rwx(self >> 0)))
def __to_rwx(self, rwx):
result = ['-'] * 3
if rwx & (1 << 2):
result[0] = 'r'
if rwx & (1 << 1):
result[1] = 'w'
if rwx & (1 << 0):
result[2] = 'x'
return ''.join(result)
@pycmd(name='echo', inType=IOType.No)
def pycmd_echo(args, input):
line = []
for arg in args[1:]:
if not isinstance(arg, basestring) and (
isinstance(arg, collections.Iterable)):
if line:
yield ' '.join(line)
line = []
for e in arg:
yield e
else:
line.append(str(arg))
if line:
yield ' '.join(line)
@pycmd(name='map')
def pycmd_map(args, input):
assert len(args) == 2
if isinstance(input, file):
input = file_to_array(input)
f = args[1]
assert callable(f)
return (f(x) for x in input)
@pycmd(name='filter')
def pycmd_filter(args, input):
assert len(args) == 2
if isinstance(input, file):
input = file_to_array(input)
cond = args[1]
assert callable(cond)
for x in input:
if cond(x):
yield x
@pycmd(name='reduce')
def pycmd_reduce(args, input):
assert len(args) == 2
if isinstance(input, file):
input = file_to_array(input)
f = args[1]
assert callable(f)
return [reduce(f, input)]
@pycmd(name='readcsv')
def pycmd_readcsv(args, input):
return csv.reader(input)
@pycmd(name='pyls')
def pycmd_pls(args, input):
table = Table(['mode', 'user', 'group', 'mtime', 'atime', 'path'])
for arg in args[1:]:
stat = os.stat(arg)
user = pwd.getpwuid(stat.st_uid).pw_name
group = pwd.getpwuid(stat.st_gid).pw_name
permission = stat.st_mode & 0777
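        # st_mtime/st_atime are epoch seconds; fromtimestamp converts them to local datetimes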
mtime = datetime.datetime.fromtimestamp(stat.st_mtime)
atime = datetime.datetime.fromtimestamp(stat.st_atime)
table.add_row([Permission(permission), user, group, mtime, atime, arg])
return table
@pycmd(name='where')
def pycmd_pls(args, input):
assert len(args) == 2
row = list(input)[0]
table = row.table()
return table.where(args[1])
@pycmd(name='orderby')
def pycmd_pls(args, input):
assert len(args) == 2
row = list(input)[0]
table = row.table()
return table.orderby(args[1])
@pycmd(name='cd', inType=IOType.No, outType=IOType.No)
def pycmd_cd(args, input):
assert len(args) == 2 or len(args) == 1
if len(args) == 2:
dir = args[1]
else:
dir = os.environ.get('HOME', '')
if dir:
os.chdir(dir)
return ()
| import collections
import csv
import os
import pwd
import StringIO
from pysh.shell.pycmd import register_pycmd
from pysh.shell.pycmd import pycmd
from pysh.shell.pycmd import IOType
from pysh.shell.table import Table
def file_to_array(f):
return map(lambda line: line.rstrip('\r\n'), f.readlines())
class Permission(int):
def __init__(self, val):
int.__init__(self, val)
def __str__(self):
return ''.join((self.__to_rwx(self >> 6),
self.__to_rwx(self >> 3),
self.__to_rwx(self >> 0)))
def __to_rwx(self, rwx):
result = ['-'] * 3
if rwx & (1 << 2):
result[0] = 'r'
if rwx & (1 << 1):
result[1] = 'w'
if rwx & (1 << 0):
result[2] = 'x'
return ''.join(result)
@pycmd(name='echo', inType=IOType.No)
def pycmd_echo(args, input):
line = []
for arg in args[1:]:
if not isinstance(arg, basestring) and (
isinstance(arg, collections.Iterable)):
if line:
yield ' '.join(line)
line = []
for e in arg:
yield e
else:
line.append(str(arg))
if line:
yield ' '.join(line)
@pycmd(name='map')
def pycmd_map(args, input):
assert len(args) == 2
if isinstance(input, file):
input = file_to_array(input)
f = args[1]
assert callable(f)
return (f(x) for x in input)
@pycmd(name='filter')
def pycmd_filter(args, input):
assert len(args) == 2
if isinstance(input, file):
input = file_to_array(input)
cond = args[1]
assert callable(cond)
for x in input:
if cond(x):
yield x
@pycmd(name='reduce')
def pycmd_reduce(args, input):
assert len(args) == 2
if isinstance(input, file):
input = file_to_array(input)
f = args[1]
assert callable(f)
return [reduce(f, input)]
@pycmd(name='readcsv')
def pycmd_readcsv(args, input):
return csv.reader(input)
@pycmd(name='pyls')
def pycmd_pls(args, input):
table = Table(['mode', 'user', 'group', 'path'])
for arg in args[1:]:
stat = os.stat(arg)
user = pwd.getpwuid(stat.st_uid).pw_name
group = pwd.getpwuid(stat.st_gid).pw_name
permission = stat.st_mode & 0777
table.add_row([Permission(permission), user, group, arg])
return table
@pycmd(name='where')
def pycmd_pls(args, input):
assert len(args) == 2
row = list(input)[0]
table = row.table()
return table.where(args[1])
@pycmd(name='orderby')
def pycmd_pls(args, input):
assert len(args) == 2
row = list(input)[0]
table = row.table()
return table.orderby(args[1])
@pycmd(name='cd', inType=IOType.No, outType=IOType.No)
def pycmd_cd(args, input):
assert len(args) == 2 or len(args) == 1
if len(args) == 2:
dir = args[1]
else:
dir = os.environ.get('HOME', '')
if dir:
os.chdir(dir)
return ()
| Python | 0 |
933e3193bbd1ceb45d33a9b2dc37f3bb80b5bc7b | fix in broadcasting code | broadcast/broadcast_service.py | broadcast/broadcast_service.py | #!/usr/bin/python
#broadcast_service.py
#
# <<<COPYRIGHT>>>
#
#
#
#
"""
.. module:: broadcast_service
@author: Veselin
"""
#------------------------------------------------------------------------------
_Debug = True
_DebugLevel = 6
#------------------------------------------------------------------------------
# This is used to be able to execute this module directly from command line.
if __name__ == '__main__':
import sys, os.path as _p
sys.path.insert(0, _p.abspath(_p.join(_p.dirname(_p.abspath(sys.argv[0])), '..')))
#------------------------------------------------------------------------------
import datetime
import random
import string
import json
#------------------------------------------------------------------------------
from logs import lg
from lib import packetid
from crypt import signed
from crypt import key
from p2p import commands
from userid import my_id
#------------------------------------------------------------------------------
def prepare_broadcast_message(owner, payload):
tm = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S')
rnd = ''.join(random.choice(string.ascii_uppercase) for _ in range(4))
msgid = '%s:%s:%s' % (tm, rnd, owner)
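    # UTC timestamp + random token + owner id keeps message ids effectively unique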
msg = [
('owner', owner),
('started', tm),
('id', msgid),
('payload', payload),
]
# owner_sign = key.Sign(key.Hash(str(msg)))
msg = {k:v for k, v in msg}
# msg['owner_sign'] = owner_sign
return msg
def read_message_from_packet(newpacket):
try:
msg = json.loads(newpacket.Payload)
except:
lg.exc()
return None
# TODO verify owner signature and creator ID
return msg
def packet_for_broadcaster(broadcaster_idurl, json_data):
if 'broadcaster' not in json_data:
json_data['broadcaster'] = broadcaster_idurl
return signed.Packet(commands.Broadcast(),
json_data['owner'],
my_id.getLocalID(),
json_data['id'],
json.dumps(json_data),
broadcaster_idurl,)
def packet_for_listener(listener_idurl, json_data):
if 'broadcaster' not in json_data:
json_data['broadcaster'] = my_id.getLocalID()
return signed.Packet(commands.Broadcast(),
json_data['owner'],
my_id.getLocalID(),
json_data['id'],
json.dumps(json_data),
listener_idurl,)
#------------------------------------------------------------------------------
def send_broadcast_message(payload):
from broadcast import broadcaster_node
from broadcast import broadcast_listener
msg = prepare_broadcast_message(my_id.getLocalID(), payload)
if broadcaster_node.A():
broadcaster_node.A('new-outbound-message', (msg, None))
elif broadcast_listener.A():
if broadcast_listener.A().state == 'OFFLINE':
broadcast_listener.A('connect')
broadcast_listener.A('outbound-message', msg)
else:
lg.warn('nor broadcaster_node(), nor broadcast_listener() exist')
return None
return msg
#------------------------------------------------------------------------------
def on_incoming_broadcast_message(json_msg):
lg.out(2, 'service_broadcasting._on_incoming_broadcast_message : %r' % json_msg)
#------------------------------------------------------------------------------
def main():
pass
#------------------------------------------------------------------------------
if __name__ == '__main__':
main()
| #!/usr/bin/python
#broadcast_service.py
#
# <<<COPYRIGHT>>>
#
#
#
#
"""
.. module:: broadcast_service
@author: Veselin
"""
#------------------------------------------------------------------------------
_Debug = True
_DebugLevel = 6
#------------------------------------------------------------------------------
# This is used to be able to execute this module directly from command line.
if __name__ == '__main__':
import sys, os.path as _p
sys.path.insert(0, _p.abspath(_p.join(_p.dirname(_p.abspath(sys.argv[0])), '..')))
#------------------------------------------------------------------------------
import datetime
import random
import string
import json
#------------------------------------------------------------------------------
from logs import lg
from lib import packetid
from crypt import signed
from crypt import key
from p2p import commands
from userid import my_id
#------------------------------------------------------------------------------
def prepare_broadcast_message(owner, payload):
tm = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S')
rnd = ''.join(random.choice(string.ascii_uppercase) for _ in range(4))
msgid = '%s:%s:%s' % (tm, rnd, owner)
msg = [
('owner', owner),
('started', tm),
('id', msgid),
('payload', payload),
]
# owner_sign = key.Sign(key.Hash(str(msg)))
msg = {k:v for k, v in msg}
# msg['owner_sign'] = owner_sign
return msg
def read_message_from_packet(newpacket):
try:
msg = json.loads(newpacket.Payload)
except:
lg.exc()
return None
# TODO verify owner signature and creator ID
return msg
def packet_for_broadcaster(broadcaster_idurl, json_data):
if 'broadcaster' not in json_data:
json_data['broadcaster'] = broadcaster_idurl
return signed.Packet(commands.Broadcast(),
json_data['owner'],
json_data['owner'],
json_data['id'],
json.dumps(json_data),
broadcaster_idurl,)
def packet_for_listener(listener_idurl, json_data):
if 'broadcaster' not in json_data:
json_data['broadcaster'] = my_id.getLocalID()
return signed.Packet(commands.Broadcast(),
json_data['owner'],
my_id.getLocalID(),
json_data['id'],
json.dumps(json_data),
listener_idurl,)
#------------------------------------------------------------------------------
def send_broadcast_message(payload):
from broadcast import broadcaster_node
from broadcast import broadcast_listener
msg = prepare_broadcast_message(my_id.getLocalID(), payload)
if broadcaster_node.A():
broadcaster_node.A('new-outbound-message', (msg, None))
elif broadcast_listener.A():
if broadcast_listener.A().state == 'OFFLINE':
broadcast_listener.A('connect')
broadcast_listener.A('outbound-message', msg)
else:
lg.warn('nor broadcaster_node(), nor broadcast_listener() exist')
return None
return msg
#------------------------------------------------------------------------------
def on_incoming_broadcast_message(json_msg):
lg.out(2, 'service_broadcasting._on_incoming_broadcast_message : %r' % json_msg)
#------------------------------------------------------------------------------
def main():
pass
#------------------------------------------------------------------------------
if __name__ == '__main__':
main()
| Python | 0.00014 |
f56cc7acae3c3b295febafec384bcfdf3b2dcee0 | koda za python | Koda.py | Koda.py |
import csv
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime
from scipy.stats import multivariate_normal as mvn
from scipy.stats import beta
from csv import DictReader
import pandas as pa
def fileReaderSmucNesrece():
fp = open("evidencanesrecnasmuciscihV1.csv", "rt", encoding=" utf -8 ")
reader = DictReader(fp)
return [line for line in reader]
SmucNes = fileReaderSmucNesrece()
SmucNes = pa.DataFrame(SmucNes)
titles = []
for i in SmucNes:
titles.append(i)
def tf(t):
st = 0
for i in SmucNes[t]:
if i == 'Da':
SmucNes.set_value(st, t, "True")
elif i == "Ne":
SmucNes.set_value(st, t, "False")
else:
SmucNes.set_value(st, t, "")
st += 1
def tfmake():
num = [3, 4, 5, 13, 14, 15]
for i in num:
tf(titles[i])
num2 = [9, 16, 17, 19, 20 ]
for i in num2:
udA(titles[i])
def udA(t):
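    # split "code description" cells, remember code -> description in dic, and keep only the code in the column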
dic = {}
temp=[]
st = 0
for i in SmucNes[t]:
if len(i)>0:
temp = i.split(" ", 1)
if temp[0] not in dic:
if temp[0] == 25:
dic[25] = 'ostalo'
                elif temp[0] == 'NOČNA':
                    dic[7] = 'NOČNA'
else:
if len(temp) > 1:
dic[temp[0]] = temp[1]
if temp[0].isdigit():
SmucNes.set_value(st, t, temp[0])
else:
for i in dic:
if temp[0] == dic[i]:
SmucNes.set_value(st, t, i)
SmucNes.set_value(st, t, "")
else:
SmucNes.set_value(st, t, "")
st += 1
tfmake()
print(SmucNes)
| from csv import DictReader
import pandas as ps
def fileReaderSmucNesrece():
fp = open("evidencanesrecnasmuciscihV1.csv", "rt", encoding=" utf -8 ")
reader = DictReader(fp)
    return [line for line in reader]  # reading
SmucNes = fileReaderSmucNesrece()
SmucNes = ps.DataFrame(SmucNes)  # using pandas
print(SmucNes)
| Python | 0.999999 |
dd526ef40d3eb13681dca602b82390d66363783f | fix FlipDimension for LinearDimension | src/Mod/Draft/draftguitools/gui_dimension_ops.py | src/Mod/Draft/draftguitools/gui_dimension_ops.py | # ***************************************************************************
# * (c) 2009, 2010 Yorik van Havre <yorik@uncreated.net> *
# * (c) 2009, 2010 Ken Cline <cline@frii.com> *
# * (c) 2020 Eliud Cabrera Castillo <e.cabrera-castillo@tum.de> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * FreeCAD is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with FreeCAD; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
"""Provides tools to modify Draft dimensions.
For example, a tool to flip the direction of the text in the dimension
as the normal is sometimes not correctly calculated automatically.
"""
## @package gui_dimension_ops
# \ingroup DRAFT
# \brief Provides tools to modify Draft dimensions.
from PySide.QtCore import QT_TRANSLATE_NOOP
import FreeCADGui as Gui
import draftutils.utils as utils
import draftguitools.gui_base as gui_base
from draftutils.translate import _tr
class FlipDimension(gui_base.GuiCommandNeedsSelection):
"""The Draft FlipDimension command definition.
Flip the normal direction of the selected dimensions.
It inherits `GuiCommandNeedsSelection` to set up the document
and other behavior. See this class for more information.
"""
def __init__(self):
super(Draft_FlipDimension, self).__init__(name=_tr("Flip dimension"))
def GetResources(self):
"""Set icon, menu and tooltip."""
_tip = ("Flip the normal direction of the selected dimensions "
"(linear, radial, angular).\n"
"If other objects are selected they are ignored.")
return {'Pixmap': 'Draft_FlipDimension',
'MenuText': QT_TRANSLATE_NOOP("Draft_FlipDimension",
"Flip dimension"),
'ToolTip': QT_TRANSLATE_NOOP("Draft_FlipDimension",
_tip)}
def Activated(self):
"""Execute when the command is called."""
super(Draft_FlipDimension, self).Activated()
for o in Gui.Selection.getSelection():
if utils.get_type(o) in ("Dimension",
"LinearDimension", "AngularDimension"):
self.doc.openTransaction("Flip dimension")
_cmd = "App.activeDocument()." + o.Name + ".Normal"
_cmd += " = "
_cmd += "App.activeDocument()." + o.Name + ".Normal.negative()"
Gui.doCommand(_cmd)
self.doc.commitTransaction()
self.doc.recompute()
Draft_FlipDimension = FlipDimension
Gui.addCommand('Draft_FlipDimension', FlipDimension())
| # ***************************************************************************
# * (c) 2009, 2010 Yorik van Havre <yorik@uncreated.net> *
# * (c) 2009, 2010 Ken Cline <cline@frii.com> *
# * (c) 2020 Eliud Cabrera Castillo <e.cabrera-castillo@tum.de> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * FreeCAD is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with FreeCAD; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
"""Provides tools to modify Draft dimensions.
For example, a tool to flip the direction of the text in the dimension
as the normal is sometimes not correctly calculated automatically.
"""
## @package gui_dimension_ops
# \ingroup DRAFT
# \brief Provides tools to modify Draft dimensions.
from PySide.QtCore import QT_TRANSLATE_NOOP
import FreeCADGui as Gui
import draftutils.utils as utils
import draftguitools.gui_base as gui_base
from draftutils.translate import _tr
class FlipDimension(gui_base.GuiCommandNeedsSelection):
"""The Draft FlipDimension command definition.
Flip the normal direction of the selected dimensions.
It inherits `GuiCommandNeedsSelection` to set up the document
and other behavior. See this class for more information.
"""
def __init__(self):
super(Draft_FlipDimension, self).__init__(name=_tr("Flip dimension"))
def GetResources(self):
"""Set icon, menu and tooltip."""
_tip = ("Flip the normal direction of the selected dimensions "
"(linear, radial, angular).\n"
"If other objects are selected they are ignored.")
return {'Pixmap': 'Draft_FlipDimension',
'MenuText': QT_TRANSLATE_NOOP("Draft_FlipDimension",
"Flip dimension"),
'ToolTip': QT_TRANSLATE_NOOP("Draft_FlipDimension",
_tip)}
def Activated(self):
"""Execute when the command is called."""
super(Draft_FlipDimension, self).Activated()
for o in Gui.Selection.getSelection():
if utils.get_type(o) in ("Dimension", "AngularDimension"):
self.doc.openTransaction("Flip dimension")
_cmd = "App.activeDocument()." + o.Name + ".Normal"
_cmd += " = "
_cmd += "App.activeDocument()." + o.Name + ".Normal.negative()"
Gui.doCommand(_cmd)
self.doc.commitTransaction()
self.doc.recompute()
Draft_FlipDimension = FlipDimension
Gui.addCommand('Draft_FlipDimension', FlipDimension())
| Python | 0 |
3b41b94b4ad7b249a2ff1040d6bf2d4759d48b14 | revise task error handling (exceptions bubble up now) | ape/main.py | ape/main.py | import argparse
import inspect
import importlib
import sys
import os
import traceback
from ape import tasks, TaskNotFound, FeatureNotFound, EnvironmentIncomplete
from featuremonkey import get_features_from_equation_file
def get_task_parser(task):
'''
construct an ArgumentParser for task
this function returns a tuple (parser, proxy_args)
if task accepts varargs only, proxy_args is True.
if task accepts only positional and explicit keyword args,
proxy args is False.
'''
args, varargs, keywords, defaults = inspect.getargspec(task)
defaults = defaults or []
parser = argparse.ArgumentParser(
prog='ape ' + task.__name__,
add_help=False,
description = task.__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
posargslen = len(args) - len(defaults)
if varargs is None and keywords is None:
for idx, arg in enumerate(args):
if idx < posargslen:
parser.add_argument(arg)
else:
default = defaults[idx - posargslen]
parser.add_argument('--' + arg, default=default)
return parser, False
elif not args and varargs and not keywords and not defaults:
return parser, True
else:
raise
def invoke_task(task, args):
'''
invoke task with args
'''
parser, proxy_args = get_task_parser(task)
if proxy_args:
task(*args)
else:
pargs = parser.parse_args(args)
task(**vars(pargs))
def run(args, features=None):
'''
composes task modules of the selected features and calls the
task given by args
'''
features = features or []
for feature in features:
try:
feature_module = importlib.import_module(feature)
except ImportError:
raise FeatureNotFound(feature)
try:
tasks_module = importlib.import_module(feature + '.tasks')
tasks.superimpose(tasks_module)
except ImportError:
#No tasks module in feature ... skip it
pass
if len(args) < 2 or (len(args) == 2 and args[1] == 'help'):
tasks.help()
else:
taskname = args[1]
try:
task = tasks.get_task(taskname, include_helpers=False)
except TaskNotFound:
print 'Task "%s" not found! Use "ape help" to get usage information.' % taskname
else:
remaining_args = args[2:] if len(args) > 2 else []
invoke_task(task, remaining_args)
def main():
'''
entry point when used via command line
features are given using the environment variable ``PRODUCT_EQUATION``.
If it is not set, ``PRODUCT_EQUATION_FILENAME`` is tried: if it points
to an existing equation file that selection is used.
(if ``APE_PREPEND_FEATURES`` is given, those features are prepended)
If the list of features is empty, ``ape.EnvironmentIncomplete`` is raised.
'''
#check APE_PREPEND_FEATURES
features = os.environ.get('APE_PREPEND_FEATURES', '').split()
#features can be specified inline in PRODUCT_EQUATION
inline_features = os.environ.get('PRODUCT_EQUATION', '').split()
if inline_features:
#append inline features
features += inline_features
else:
#fallback: features are specified in equation file
feature_file = os.environ.get('PRODUCT_EQUATION_FILENAME', '')
if feature_file:
#append features from equation file
features += get_features_from_equation_file(feature_file)
else:
if not features:
raise EnvironmentIncomplete(
'Error running ape:\n'
'Either the PRODUCT_EQUATION or '
'PRODUCT_EQUATION_FILENAME environment '
'variable needs to be set!'
)
#run ape with features selected
run(sys.argv, features=features)
if __name__ == '__main__':
try:
main()
except Exception as e:
traceback.print_exc()
sys.exit(1)
| import argparse
import inspect
import importlib
import sys
import os
from ape import tasks, TaskNotFound, FeatureNotFound, EnvironmentIncomplete
from featuremonkey import get_features_from_equation_file
def get_task_parser(task):
'''
construct an ArgumentParser for task
this function returns a tuple (parser, proxy_args)
if task accepts varargs only, proxy_args is True.
if task accepts only positional and explicit keyword args,
proxy args is False.
'''
args, varargs, keywords, defaults = inspect.getargspec(task)
defaults = defaults or []
parser = argparse.ArgumentParser(
prog='ape ' + task.__name__,
add_help=False,
description = task.__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
posargslen = len(args) - len(defaults)
if varargs is None and keywords is None:
for idx, arg in enumerate(args):
if idx < posargslen:
parser.add_argument(arg)
else:
default = defaults[idx - posargslen]
parser.add_argument('--' + arg, default=default)
return parser, False
elif not args and varargs and not keywords and not defaults:
return parser, True
else:
raise
def invoke_task(task, args):
'''
invoke task with args
'''
parser, proxy_args = get_task_parser(task)
if proxy_args:
task(*args)
else:
pargs = parser.parse_args(args)
task(**vars(pargs))
def run(args, features=None):
'''
composes task modules of the selected features and calls the
task given by args
'''
features = features or []
for feature in features:
try:
feature_module = importlib.import_module(feature)
except ImportError:
raise FeatureNotFound(feature)
try:
tasks_module = importlib.import_module(feature + '.tasks')
tasks.superimpose(tasks_module)
except ImportError:
#No tasks module in feature ... skip it
pass
if len(args) < 2 or (len(args) == 2 and args[1] == 'help'):
tasks.help()
else:
taskname = args[1]
try:
task = tasks.get_task(taskname, include_helpers=False)
except TaskNotFound:
print 'Task "%s" not found! Use "ape help" to get usage information.' % taskname
else:
remaining_args = args[2:] if len(args) > 2 else []
invoke_task(task, remaining_args)
def main():
'''
entry point when used via command line
features are given using the environment variable PRODUCT_EQUATION.
If it is not set, PRODUCT_EQUATION_FILENAME is tried: if it points
to an existing equation file that selection is used.
If that fails ``ape.EnvironmentIncomplete`` is raised.
'''
#check APE_PREPEND_FEATURES
features = os.environ.get('APE_PREPEND_FEATURES', '').split()
#features can be specified inline in PRODUCT_EQUATION
inline_features = os.environ.get('PRODUCT_EQUATION', '').split()
if inline_features:
#append inline features
features += inline_features
else:
#fallback: features are specified in equation file
feature_file = os.environ.get('PRODUCT_EQUATION_FILENAME', '')
if feature_file:
#append features from equation file
features += get_features_from_equation_file(feature_file)
else:
if not features:
print (
'Error running ape:\n'
'Either the PRODUCT_EQUATION or '
'PRODUCT_EQUATION_FILENAME environment '
'variable needs to be set!'
)
sys.exit(1)
#run ape with features selected
run(sys.argv, features=features)
if __name__ == '__main__':
main()
| Python | 0 |
17474a74a8c382c1cc5923c0e2128e4a4e776553 | Add method I am yet to use | eva/util/nutil.py | eva/util/nutil.py | import numpy as np
def to_rgb(pixels):
return np.repeat(pixels, 3 if pixels.shape[2] == 1 else 1, 2)
def binarize(arr, generate=np.random.uniform):
return (generate(size=arr.shape) < arr).astype('i')
def quantisize(arr, levels):
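    # bin edges 0, 1/levels, ...: values in [0, 1] map to integer levels 0..levels-1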
return (np.digitize(arr, np.arange(levels) / levels) - 1).astype('i')
| import numpy as np
def to_rgb(pixels):
return np.repeat(pixels, 3 if pixels.shape[2] == 1 else 1, 2)
def binarize(arr, generate=np.random.uniform):
return (generate(size=arr.shape) < arr).astype('i')
| Python | 0 |
57773e149ae2c7634e262b103a10cc35f6e138b2 | Ids are strings. | src/scim/schema/core.py | src/scim/schema/core.py | # -*- coding: utf-8 -*-
from . import attributes, types
class Metadata(attributes.Base):
"""A complex attribute containing resource metadata.
"""
#! The DateTime the Resource was added to the Service Provider.
created = attributes.Singular(types.DateTime)
#! The most recent DateTime the details of this Resource were updated at
#! the Service Provider. If this Resource has never been modified since
#! its initial creation, the value MUST be the same as the value of
#! created.
last_modified = attributes.Singular(types.DateTime)
#! The URI of the resource being returned.
#!
#! This value MUST be the same as the Location HTTP response header.
location = attributes.Singular(types.String)
#! The version of the Resource being returned.
#!
#! This value must be the same as the ETag HTTP response header.
version = attributes.Singular(types.String)
#! The names of the attributes to remove during a PATCH operation.
attributes = attributes.List(types.String)
class Base(attributes.Base):
"""Defines the base SCIM schema (v1.1 § 5.5).
Contains common attributes that all data models in the SCIM schema have.
"""
class Meta:
schema = 'urn:scim:schemas:core:1.0'
#! Unique identifier for the SCIM Resource as defined by the
#! Service Provider.
#!
#! Each representation of the Resource MUST include a non-empty id value.
#! This identifier MUST be unique across the Service Provider's entire
#! set of Resources. It MUST be a stable, non-reassignable identifier
#! that does not change when the same Resource is returned in
#! subsequent requests.
#!
#! The value of the id attribute is always issued by the Service Provider
#! and MUST never be specified by the Service Consumer.
id = attributes.Singular(types.String, required=True)
#! An identifier for the Resource as defined by the Service Consumer.
#!
#! The externalId may simplify identification of the Resource between
#! Service Consumer and Service provider by allowing the Consumer to
#! refer to the Resource with its own identifier, obviating the need to
#! store a local mapping between the local identifier of the Resource and
#! the identifier used by the Service Provider.
external_id = attributes.Singular(types.String)
#! A complex attribute containing resource metadata.
meta = attributes.Complex(Metadata, last=True)
| # -*- coding: utf-8 -*-
from . import attributes, types
class Metadata(attributes.Base):
"""A complex attribute containing resource metadata.
"""
#! The DateTime the Resource was added to the Service Provider.
created = attributes.Singular(types.DateTime)
#! The most recent DateTime the details of this Resource were updated at
#! the Service Provider. If this Resource has never been modified since
#! its initial creation, the value MUST be the same as the value of
#! created.
last_modified = attributes.Singular(types.DateTime)
#! The URI of the resource being returned.
#!
#! This value MUST be the same as the Location HTTP response header.
location = attributes.Singular(types.String)
#! The version of the Resource being returned.
#!
#! This value must be the same as the ETag HTTP response header.
version = attributes.Singular(types.String)
#! The names of the attributes to remove during a PATCH operation.
attributes = attributes.List(types.String)
class Base(attributes.Base):
"""Defines the base SCIM schema (v1.1 § 5.5).
Contains common attributes that all data models in the SCIM schema have.
"""
class Meta:
schema = 'urn:scim:schemas:core:1.0'
#! Unique identifier for the SCIM Resource as defined by the
#! Service Provider.
#!
#! Each representation of the Resource MUST include a non-empty id value.
#! This identifier MUST be unique across the Service Provider's entire
#! set of Resources. It MUST be a stable, non-reassignable identifier
#! that does not change when the same Resource is returned in
#! subsequent requests.
#!
#! The value of the id attribute is always issued by the Service Provider
#! and MUST never be specified by the Service Consumer.
id = attributes.Singular(types.Integer, required=True)
#! An identifier for the Resource as defined by the Service Consumer.
#!
#! The externalId may simplify identification of the Resource between
#! Service Consumer and Service provider by allowing the Consumer to
#! refer to the Resource with its own identifier, obviating the need to
#! store a local mapping between the local identifier of the Resource and
#! the identifier used by the Service Provider.
external_id = attributes.Singular(types.Integer)
#! A complex attribute containing resource metadata.
meta = attributes.Complex(Metadata, last=True)
| Python | 0.999944 |
fb2cfe4759fb98de644932af17a247428b2cc0f5 | Fix Auth API key check causing error 500s | api/auth.py | api/auth.py | from django.http import HttpResponseForbidden
from django.contrib.auth.models import AnonymousUser
from api.models import AuthAPIKey
class APIKeyAuthentication(object):
def is_authenticated(self, request):
params = {}
for key,value in request.GET.items():
params[key.lower()] = value
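        # membership test: a missing 'apikey' can no longer raise KeyError (the HTTP 500)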
if 'apikey' in params:
try:
keyobj = AuthAPIKey.objects.get(key=params['apikey'])
except:
keyobj = None
if keyobj and keyobj.active:
request.user = AnonymousUser()
return True
return False
def challenge(self):
return HttpResponseForbidden('Access Denied, use a API Key')
| from django.http import HttpResponseForbidden
from django.contrib.auth.models import AnonymousUser
from api.models import AuthAPIKey
class APIKeyAuthentication(object):
def is_authenticated(self, request):
params = {}
for key,value in request.GET.items():
params[key.lower()] = value
if params['apikey']:
try:
keyobj = AuthAPIKey.objects.get(key=params['apikey'])
except:
keyobj = None
if keyobj and keyobj.active:
request.user = AnonymousUser()
return True
return False
def challenge(self):
return HttpResponseForbidden('Access Denied, use a API Key')
| Python | 0 |
1d67f755ea0f638c3cabef9e9359665d5b50ff86 | Clean up BeamConstellation | cactusbot/services/beam/constellation.py | cactusbot/services/beam/constellation.py | """Interact with Beam Constellation."""
import re
import json
from .. import WebSocket
class BeamConstellation(WebSocket):
"""Interact with Beam Constellation."""
URL = "wss://constellation.beam.pro"
RESPONSE_EXPR = re.compile(r'^(\d+)(.+)?$')
INTERFACE_EXPR = re.compile(r'^([a-z]+):\d+:([a-z]+)')
def __init__(self, channel, user):
super().__init__(self.URL)
assert isinstance(channel, int), "Channel ID must be an integer."
self.channel = channel
assert isinstance(user, int), "User ID must be an integer."
self.user = user
async def initialize(self, *interfaces):
"""Subscribe to Constellation interfaces."""
if not interfaces:
interfaces = (
"channel:{channel}:update",
"channel:{channel}:status",
"channel:{channel}:followed",
"channel:{channel}:subscribed",
"channel:{channel}:hosted",
"user:{user}:followed",
"user:{user}:subscribed",
"user:{user}:achievement"
)
interfaces = [
interface.format(channel=self.channel, user=self.user)
for interface in interfaces
]
packet = {
"type": "method",
"method": "livesubscribe",
"params": {
"events": interfaces
},
"id": 1
}
self.websocket.send_str(json.dumps(packet))
await self.receive()
self.logger.info(
"Successfully subscribed to Constellation interfaces.")
async def parse(self, packet):
"""Parse a chat packet."""
try:
packet = json.loads(packet)
except (TypeError, ValueError):
self.logger.exception("Invalid JSON: %s.", packet)
return None
else:
if packet.get("error") is not None:
self.logger.error(packet)
else:
self.logger.debug(packet)
return packet
| """Interact with Beam Constellation."""
from logging import getLogger
import re
import json
import asyncio
from .. import WebSocket
class BeamConstellation(WebSocket):
"""Interact with Beam Constellation."""
URL = "wss://constellation.beam.pro"
RESPONSE_EXPR = re.compile(r'^(\d+)(.+)?$')
INTERFACE_EXPR = re.compile(r'^([a-z]+):\d+:([a-z]+)')
def __init__(self, channel, user):
super().__init__(self.URL)
self.logger = getLogger(__name__)
assert isinstance(channel, int), "Channel ID must be an integer."
self.channel = channel
assert isinstance(user, int), "User ID must be an integer."
self.user = user
async def read(self, handle):
"""Read packets from the Constellation WebSocket."""
packet = await self.parse(await self.receive())
await super().read(handle)
async def initialize(self, *interfaces):
"""Subscribe to Constellation interfaces."""
if not interfaces:
interfaces = [
"channel:{channel}:update",
"channel:{channel}:status",
"channel:{channel}:followed",
"channel:{channel}:subscribed",
"channel:{channel}:hosted",
"user:{user}:followed",
"user:{user}:subscribed",
"user:{user}:achievement"
]
interfaces = list(
interface.format(channel=self.channel, user=self.user)
for interface in interfaces
)
packet = {
"type": "method",
"method": "livesubscribe",
"params": {
"events": interfaces
},
"id": 1
}
self.websocket.send_str(json.dumps(packet))
self.logger.info(
"Successfully subscribed to Constellation interfaces.")
| Python | 0.000041 |
948dadbd4aa262c86e561c56e7cd7748cdefa18b | Extend teacher column for institute courses | data_center/models.py | data_center/models.py | # -*- coding: utf-8 -*-
from datetime import datetime
from django.db import models
class Course(models.Model):
"""Course database schema"""
no = models.CharField(max_length=20, blank=True)
code = models.CharField(max_length=20, blank=True)
eng_title = models.CharField(max_length=200, blank=True)
chi_title = models.CharField(max_length=200, blank=True)
note = models.TextField(blank=True)
objective = models.CharField(max_length=80, blank=True)
time = models.CharField(max_length=80, blank=True)
time_token = models.CharField(max_length=80, blank=True)
teacher = models.CharField(max_length=120, blank=True) # Only save Chinese
room = models.CharField(max_length=80, blank=True)
credit = models.IntegerField(blank=True, null=True)
limit = models.IntegerField(blank=True, null=True)
prerequisite = models.BooleanField(default=False, blank=True)
clas = models.CharField(max_length=10, blank=True)
dept = models.CharField(max_length=10, blank=True)
serial = models.CharField(max_length=20, blank=True)
ge = models.CharField(max_length=80, blank=True)
hit = models.IntegerField(default=0)
syllabus = models.TextField(blank=True) # A html div
def __str__(self):
return self.no
class Department(models.Model):
dept_name = models.CharField(max_length=20, blank=True)
required_course = models.ManyToManyField(Course, blank=True)
def __unicode__(self):
return self.dept_name
class Announcement(models.Model):
TAG_CHOICE = (
('Info', '公告'),
('Bug', '已知問題'),
('Fix', '問題修復'),
)
content = models.TextField(blank=True)
time = models.DateTimeField(default=datetime.now)
tag = models.CharField(max_length=10, choices=TAG_CHOICE, default='Info')
def __unicode__(self):
return '%s|%s' % (self.time, self.tag)
| # -*- coding: utf-8 -*-
from datetime import datetime
from django.db import models
class Course(models.Model):
"""Course database schema"""
no = models.CharField(max_length=20, blank=True)
code = models.CharField(max_length=20, blank=True)
eng_title = models.CharField(max_length=200, blank=True)
chi_title = models.CharField(max_length=200, blank=True)
note = models.TextField(blank=True)
objective = models.CharField(max_length=80, blank=True)
time = models.CharField(max_length=80, blank=True)
time_token = models.CharField(max_length=80, blank=True)
teacher = models.CharField(max_length=80, blank=True) # Only save Chinese
room = models.CharField(max_length=80, blank=True)
credit = models.IntegerField(blank=True, null=True)
limit = models.IntegerField(blank=True, null=True)
prerequisite = models.BooleanField(default=False, blank=True)
clas = models.CharField(max_length=10, blank=True)
dept = models.CharField(max_length=10, blank=True)
serial = models.CharField(max_length=20, blank=True)
ge = models.CharField(max_length=80, blank=True)
hit = models.IntegerField(default=0)
syllabus = models.TextField(blank=True) # A html div
def __str__(self):
return self.no
class Department(models.Model):
dept_name = models.CharField(max_length=20, blank=True)
required_course = models.ManyToManyField(Course, blank=True)
def __unicode__(self):
return self.dept_name
class Announcement(models.Model):
TAG_CHOICE = (
('Info', '公告'),
('Bug', '已知問題'),
('Fix', '問題修復'),
)
content = models.TextField(blank=True)
time = models.DateTimeField(default=datetime.now)
tag = models.CharField(max_length=10, choices=TAG_CHOICE, default='Info')
def __unicode__(self):
return '%s|%s' % (self.time, self.tag)
| Python | 0 |
1e3ea59bb631bb78dd0525dcf92a96a6a39053d8 | fix hooks may not been assignment #148 | py12306/helpers/request.py | py12306/helpers/request.py | import requests
from requests.exceptions import *
from py12306.helpers.func import *
from requests_html import HTMLSession, HTMLResponse
requests.packages.urllib3.disable_warnings()
class Request(HTMLSession):
"""
    Request handling class
"""
# session = {}
def save_to_file(self, url, path):
response = self.get(url, stream=True)
with open(path, 'wb') as f:
for chunk in response.iter_content(chunk_size=1024):
f.write(chunk)
return response
@staticmethod
def _handle_response(response, **kwargs) -> HTMLResponse:
"""
        Augment the response
:param response:
:param kwargs:
:return:
"""
response = HTMLSession._handle_response(response, **kwargs)
expand_class(response, 'json', Request.json)
return response
def add_response_hook(self, hook):
hooks = self.hooks['response']
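        # the stored hook may be a bare callable; wrap it in a list before appending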
if not isinstance(hooks, list):
hooks = [hooks]
hooks.append(hook)
self.hooks['response'] = hooks
return self
def json(self, default={}):
"""
        Override the json method and intercept errors
:return:
"""
from py12306.app import Dict
try:
result = self.old_json()
return Dict(result)
except:
return Dict(default)
    def request(self, *args, **kwargs):  # intercept all errors
try:
if not 'timeout' in kwargs:
from py12306.config import Config
kwargs['timeout'] = Config().TIME_OUT_OF_REQUEST
response = super().request(*args, **kwargs)
return response
except RequestException as e:
from py12306.log.common_log import CommonLog
if e.response:
response = e.response
else:
response = HTMLResponse(HTMLSession)
# response.status_code = 500
expand_class(response, 'json', Request.json)
response.reason = response.reason if response.reason else CommonLog.MESSAGE_RESPONSE_EMPTY_ERROR
return response
def cdn_request(self, url: str, cdn=None, method='GET', **kwargs):
from py12306.helpers.api import HOST_URL_OF_12306
from py12306.helpers.cdn import Cdn
if not cdn: cdn = Cdn.get_cdn()
url = url.replace(HOST_URL_OF_12306, cdn)
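        # keep the original Host header so the CDN node still serves 12306 content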
return self.request(method, url, headers={'Host': HOST_URL_OF_12306}, verify=False, **kwargs)
| import requests
from requests.exceptions import *
from py12306.helpers.func import *
from requests_html import HTMLSession, HTMLResponse
requests.packages.urllib3.disable_warnings()
class Request(HTMLSession):
"""
    Request handling class
"""
# session = {}
def save_to_file(self, url, path):
response = self.get(url, stream=True)
with open(path, 'wb') as f:
for chunk in response.iter_content(chunk_size=1024):
f.write(chunk)
return response
@staticmethod
def _handle_response(response, **kwargs) -> HTMLResponse:
"""
        Augment the response
:param response:
:param kwargs:
:return:
"""
response = HTMLSession._handle_response(response, **kwargs)
expand_class(response, 'json', Request.json)
return response
def add_response_hook(self, hook):
exist_hooks = self.hooks['response']
if not isinstance(exist_hooks, list): hooks = [exist_hooks]
hooks.append(hook)
self.hooks['response'] = hooks
return self
def json(self, default={}):
"""
        Override the json method and intercept errors
:return:
"""
from py12306.app import Dict
try:
result = self.old_json()
return Dict(result)
except:
return Dict(default)
    def request(self, *args, **kwargs):  # intercept all errors
try:
if not 'timeout' in kwargs:
from py12306.config import Config
kwargs['timeout'] = Config().TIME_OUT_OF_REQUEST
response = super().request(*args, **kwargs)
return response
except RequestException as e:
from py12306.log.common_log import CommonLog
if e.response:
response = e.response
else:
response = HTMLResponse(HTMLSession)
# response.status_code = 500
expand_class(response, 'json', Request.json)
response.reason = response.reason if response.reason else CommonLog.MESSAGE_RESPONSE_EMPTY_ERROR
return response
def cdn_request(self, url: str, cdn=None, method='GET', **kwargs):
from py12306.helpers.api import HOST_URL_OF_12306
from py12306.helpers.cdn import Cdn
if not cdn: cdn = Cdn.get_cdn()
url = url.replace(HOST_URL_OF_12306, cdn)
return self.request(method, url, headers={'Host': HOST_URL_OF_12306}, verify=False, **kwargs)
| Python | 0 |
89aa3cbc62a947b3623380f7d1fe631bdf070b98 | fix the need of admin to run | homeassistant/components/influxdb.py | homeassistant/components/influxdb.py | """
homeassistant.components.influxdb
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
InfluxDB component which allows you to send data to an Influx database.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/influxdb/
"""
import logging
import homeassistant.util as util
from homeassistant.helpers import validate_config
from homeassistant.const import (EVENT_STATE_CHANGED, STATE_ON, STATE_OFF,
STATE_UNLOCKED, STATE_LOCKED, STATE_UNKNOWN)
from homeassistant.components.sun import (STATE_ABOVE_HORIZON,
STATE_BELOW_HORIZON)
_LOGGER = logging.getLogger(__name__)
DOMAIN = "influxdb"
DEPENDENCIES = []
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = 8086
DEFAULT_DATABASE = 'home_assistant'
REQUIREMENTS = ['influxdb==2.10.0']
CONF_HOST = 'host'
CONF_PORT = 'port'
CONF_DB_NAME = 'database'
CONF_USERNAME = 'username'
CONF_PASSWORD = 'password'
def setup(hass, config):
""" Setup the InfluxDB component. """
from influxdb import InfluxDBClient, exceptions
if not validate_config(config, {DOMAIN: ['host']}, _LOGGER):
return False
conf = config[DOMAIN]
host = conf[CONF_HOST]
port = util.convert(conf.get(CONF_PORT), int, DEFAULT_PORT)
database = util.convert(conf.get(CONF_DB_NAME), str, DEFAULT_DATABASE)
username = util.convert(conf.get(CONF_USERNAME), str)
password = util.convert(conf.get(CONF_PASSWORD), str)
try:
influx = InfluxDBClient(host=host, port=port, username=username,
password=password, database=database)
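        # a lightweight read query verifies connectivity and permissions without admin rights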
influx.query("select * from /.*/ LIMIT 1;")
except exceptions.InfluxDBClientError as exc:
_LOGGER.error("Database host is not accessible due to '%s', please "
"check your entries in the configuration file and that"
" the database exists and is READ/WRITE.", exc)
return False
def influx_event_listener(event):
""" Listen for new messages on the bus and sends them to Influx. """
state = event.data.get('new_state')
if state is None:
return
if state.state in (STATE_ON, STATE_LOCKED, STATE_ABOVE_HORIZON):
_state = 1
elif state.state in (STATE_OFF, STATE_UNLOCKED, STATE_UNKNOWN,
STATE_BELOW_HORIZON):
_state = 0
else:
_state = state.state
if _state == '':
return
try:
_state = float(_state)
except ValueError:
pass
measurement = state.attributes.get('unit_of_measurement', state.domain)
json_body = [
{
'measurement': measurement,
'tags': {
'domain': state.domain,
'entity_id': state.object_id,
},
'time': event.time_fired,
'fields': {
'value': _state,
}
}
]
try:
influx.write_points(json_body)
except exceptions.InfluxDBClientError:
_LOGGER.exception('Error saving event "%s" to InfluxDB', json_body)
hass.bus.listen(EVENT_STATE_CHANGED, influx_event_listener)
return True
| """
homeassistant.components.influxdb
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
InfluxDB component which allows you to send data to an Influx database.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/influxdb/
"""
import logging
import homeassistant.util as util
from homeassistant.helpers import validate_config
from homeassistant.const import (EVENT_STATE_CHANGED, STATE_ON, STATE_OFF,
STATE_UNLOCKED, STATE_LOCKED, STATE_UNKNOWN)
from homeassistant.components.sun import (STATE_ABOVE_HORIZON,
STATE_BELOW_HORIZON)
_LOGGER = logging.getLogger(__name__)
DOMAIN = "influxdb"
DEPENDENCIES = []
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = 8086
DEFAULT_DATABASE = 'home_assistant'
REQUIREMENTS = ['influxdb==2.10.0']
CONF_HOST = 'host'
CONF_PORT = 'port'
CONF_DB_NAME = 'database'
CONF_USERNAME = 'username'
CONF_PASSWORD = 'password'
def setup(hass, config):
""" Setup the InfluxDB component. """
from influxdb import InfluxDBClient, exceptions
if not validate_config(config, {DOMAIN: ['host']}, _LOGGER):
return False
conf = config[DOMAIN]
host = conf[CONF_HOST]
port = util.convert(conf.get(CONF_PORT), int, DEFAULT_PORT)
database = util.convert(conf.get(CONF_DB_NAME), str, DEFAULT_DATABASE)
username = util.convert(conf.get(CONF_USERNAME), str)
password = util.convert(conf.get(CONF_PASSWORD), str)
try:
influx = InfluxDBClient(host=host, port=port, username=username,
password=password, database=database)
databases = [i['name'] for i in influx.get_list_database()]
except exceptions.InfluxDBClientError:
_LOGGER.error("Database host is not accessible. "
"Please check your entries in the configuration file.")
return False
if database not in databases:
_LOGGER.error("Database %s doesn't exist", database)
return False
def influx_event_listener(event):
""" Listen for new messages on the bus and sends them to Influx. """
state = event.data.get('new_state')
if state is None:
return
if state.state in (STATE_ON, STATE_LOCKED, STATE_ABOVE_HORIZON):
_state = 1
elif state.state in (STATE_OFF, STATE_UNLOCKED, STATE_UNKNOWN,
STATE_BELOW_HORIZON):
_state = 0
else:
_state = state.state
try:
_state = float(_state)
except ValueError:
pass
measurement = state.attributes.get('unit_of_measurement', state.domain)
json_body = [
{
'measurement': measurement,
'tags': {
'domain': state.domain,
'entity_id': state.object_id,
},
'time': event.time_fired,
'fields': {
'value': _state,
}
}
]
try:
influx.write_points(json_body)
except exceptions.InfluxDBClientError:
_LOGGER.exception('Error saving event to InfluxDB')
hass.bus.listen(EVENT_STATE_CHANGED, influx_event_listener)
return True
| Python | 0 |
f0243e8ab8897d218bcf45af91a7cd03a3f83c5e | Add section comments. | cloudkitpy/container.py | cloudkitpy/container.py | #
# container.py
# CloudKitPy
#
# Created by James Barrow on 28/04/2016.
# Copyright (c) 2013-2016 Pig on a Hill Productions. All rights reserved.
#
# !/usr/bin/env python
class Container:
# Getting the Public and Private Databases
public_cloud_database = None
private_cloud_database = None
# Getting the Identifier and Environment
container_identifier = None
environment = None
apns_environment = None
def __init__(
self,
container_identifier,
environment,
apns_environment=None
):
pass
# Discovering Users
def fetch_user_info(self):
"""Fetch information about the current user asynchronously."""
pass
def discover_user_info_with_email_address(self, email_address):
"""Fetch information about a single user.
Based on the user's email address.
"""
pass
def discover_user_info_with_user_record_name(self, record_name):
"""Fetch information about a single user using the record name."""
pass
| #
# container.py
# CloudKitPy
#
# Created by James Barrow on 28/04/2016.
# Copyright (c) 2013-2016 Pig on a Hill Productions. All rights reserved.
#
# !/usr/bin/env python
class Container:
public_cloud_database = None
private_cloud_database = None
container_identifier = None
environment = None
apns_environment = None
def __init__(
self,
container_identifier,
environment,
apns_environment=None
):
pass
def fetch_user_info(self):
"""Fetch information about the current user asynchronously."""
pass
def discover_user_info_with_email_address(self, email_address):
"""Fetch information about a single user.
Based on the user's email address.
"""
pass
def discover_user_info_with_user_record_name(self, record_name):
"""Fetch information about a single user using the record name."""
pass
| Python | 0 |
9274ec308974b0d6702e7f98a0b8a2c3be1cbe11 | FIX #170 Throw Python34 compatible exception | autosklearn/util/dependencies.py | autosklearn/util/dependencies.py | from warnings import warn
import pkg_resources
import re
from distutils.version import LooseVersion
RE_PATTERN = re.compile('^(?P<name>[\w\-]+)((?P<operation>==|>=|>)(?P<version>(\d+\.)?(\d+\.)?(\d+)))?$')
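# matches requirement specifiers such as "name", "name==1.2.3" or "name>=0.9"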
def verify_packages(packages):
if not packages:
return
if isinstance(packages, str):
packages = packages.splitlines()
for package in packages:
if not package:
continue
match = RE_PATTERN.match(package)
if match:
name = match.group('name')
operation = match.group('operation')
version = match.group('version')
_verify_package(name, operation, version)
else:
raise ValueError('Unable to read requirement: %s' % package)
def _verify_package(name, operation, version):
try:
module = pkg_resources.get_distribution(name)
except pkg_resources.DistributionNotFound:
raise MissingPackageError(name)
if not operation:
return
required_version = LooseVersion(version)
installed_version = LooseVersion(module.version)
if operation == '==':
check = required_version == installed_version
elif operation == '>':
check = installed_version > required_version
elif operation == '>=':
check = installed_version > required_version or \
installed_version == required_version
else:
raise NotImplementedError('operation \'%s\' is not supported' % operation)
if not check:
raise IncorrectPackageVersionError(name, installed_version, operation, required_version)
class MissingPackageError(Exception):
error_message = 'mandatory package \'{name}\' not found'
def __init__(self, package_name):
self.package_name = package_name
super(MissingPackageError, self).__init__(self.error_message.format(name=package_name))
class IncorrectPackageVersionError(Exception):
error_message = '\'{name} {installed_version}\' version mismatch ({operation}{required_version})'
def __init__(self, package_name, installed_version, operation, required_version):
self.package_name = package_name
self.installed_version = installed_version
self.operation = operation
self.required_version = required_version
message = self.error_message.format(name=package_name,
installed_version=installed_version,
operation=operation,
required_version=required_version)
super(IncorrectPackageVersionError, self).__init__(message)
| from warnings import warn
import pkg_resources
import re
from distutils.version import LooseVersion
RE_PATTERN = re.compile('^(?P<name>[\w\-]+)((?P<operation>==|>=|>)(?P<version>(\d+\.)?(\d+\.)?(\d+)))?$')
def verify_packages(packages):
if not packages:
return
if isinstance(packages, str):
packages = packages.splitlines()
for package in packages:
if not package:
continue
match = RE_PATTERN.match(package)
if match:
name = match.group('name')
operation = match.group('operation')
version = match.group('version')
_verify_package(name, operation, version)
else:
raise ValueError('Unable to read requirement: %s' % package)
def _verify_package(name, operation, version):
try:
module = pkg_resources.get_distribution(name)
except pkg_resources.DistributionNotFound:
raise MissingPackageError(name) from None
if not operation:
return
required_version = LooseVersion(version)
installed_version = LooseVersion(module.version)
if operation == '==':
check = required_version == installed_version
elif operation == '>':
check = installed_version > required_version
elif operation == '>=':
check = installed_version > required_version or \
installed_version == required_version
else:
raise NotImplementedError('operation \'%s\' is not supported' % operation)
if not check:
raise IncorrectPackageVersionError(name, installed_version, operation, required_version)
class MissingPackageError(Exception):
error_message = 'mandatory package \'{name}\' not found'
def __init__(self, package_name):
self.package_name = package_name
super(MissingPackageError, self).__init__(self.error_message.format(name=package_name))
class IncorrectPackageVersionError(Exception):
error_message = '\'{name} {installed_version}\' version mismatch ({operation}{required_version})'
def __init__(self, package_name, installed_version, operation, required_version):
self.package_name = package_name
self.installed_version = installed_version
self.operation = operation
self.required_version = required_version
message = self.error_message.format(name=package_name,
installed_version=installed_version,
operation=operation,
required_version=required_version)
super(IncorrectPackageVersionError, self).__init__(message)
| Python | 0 |
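
A note on the diff above: the dropped `from None` clause suppresses implicit exception chaining but is a syntax error on Python 2, whereas a bare `raise` parses under both interpreters. A minimal sketch of the trade-off, using a made-up lookup function rather than the package checker itself:

# `raise exc from None` (Python 3 only) hides the triggering exception;
# the portable plain `raise` keeps it as implicit context on Python 3.
def lookup(name, table):
    try:
        return table[name]
    except KeyError:
        # Python 3 only: raise ValueError("unknown name: %s" % name) from None
        raise ValueError("unknown name: %s" % name)

try:
    lookup("x", {})
except ValueError as exc:
    print("%s: %s" % (type(exc).__name__, exc))  # KeyError still visible as context on Python 3
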
07e767f9c19ece3c41e33cca24dd2b0317244292 | Update the latest version | src/site/sphinx/conf.py | src/site/sphinx/conf.py | # -*- coding: utf-8 -*-
import sys, os, re
import xml.etree.ElementTree as etree
from datetime import date
from collections import defaultdict
def etree_to_dict(t):
t.tag = re.sub(r'\{[^\}]*\}', '', t.tag)
d = {t.tag: {} if t.attrib else None}
children = list(t)
if children:
dd = defaultdict(list)
for dc in map(etree_to_dict, children):
for k, v in dc.iteritems():
dd[k].append(v)
d = {t.tag: {k:v[0] if len(v) == 1 else v for k, v in dd.iteritems()}}
if t.attrib:
d[t.tag].update(('@' + k, v) for k, v in t.attrib.iteritems())
if t.text:
text = t.text.strip()
if children or t.attrib:
if text:
d[t.tag]['#text'] = text
else:
d[t.tag] = text
return d
# Parse the Maven pom.xml.
pom = etree_to_dict(etree.parse('../../../pom.xml').getroot())['project']
# Set the basic project information.
project = pom['name']
project_short = pom['name']
copyright = str(date.today().year) + ', ' + pom['organization']['name']
# Set the project version and release.
# Use the last known stable release if the current version ends with '-SNAPSHOT'.
if re.match(r'^.*-SNAPSHOT$', pom['version']):
release = '0.21.4.Final'
version = '0.21'
else:
release = pom['version']
version = re.match(r'^[0-9]+\.[0-9]+', pom['version']).group(0)
# Define some useful global substitutions.
rst_epilog = '\n'
rst_epilog += '.. |baseurl| replace:: http://line.github.io/armeria/\n'
rst_epilog += '.. |jetty_alpnAgent_version| replace:: ' + pom['properties']['jetty.alpnAgent.version'] + '\n'
rst_epilog += '.. |oss_parent_version| replace:: ' + pom['parent']['version'] + '\n'
rst_epilog += '.. |logback_version| replace:: ' + pom['properties']['logback.version'] + '\n'
rst_epilog += '.. |slf4j_version| replace:: ' + pom['properties']['slf4j.version'] + '\n'
rst_epilog += '.. |tomcat_version| replace:: ' + pom['properties']['tomcat.version'] + '\n'
rst_epilog += '\n'
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc']
templates_path = ['_templates']
source_suffix = '.rst'
source_encoding = 'utf-8-sig'
master_doc = 'index'
exclude_trees = ['.build']
add_function_parentheses = True
pygments_style = 'tango'
master_doc = 'index'
sys.path.append(os.path.abspath('_themes'))
html_theme = 'sphinx_rtd_theme'
html_theme_path = ['_themes']
html_short_title = project_short
html_static_path = ['_static']
html_use_smartypants = True
html_use_index = True
html_show_sourcelink = False
htmlhelp_basename = pom['artifactId']
| # -*- coding: utf-8 -*-
import sys, os, re
import xml.etree.ElementTree as etree
from datetime import date
from collections import defaultdict
def etree_to_dict(t):
t.tag = re.sub(r'\{[^\}]*\}', '', t.tag)
d = {t.tag: {} if t.attrib else None}
children = list(t)
if children:
dd = defaultdict(list)
for dc in map(etree_to_dict, children):
for k, v in dc.iteritems():
dd[k].append(v)
d = {t.tag: {k:v[0] if len(v) == 1 else v for k, v in dd.iteritems()}}
if t.attrib:
d[t.tag].update(('@' + k, v) for k, v in t.attrib.iteritems())
if t.text:
text = t.text.strip()
if children or t.attrib:
if text:
d[t.tag]['#text'] = text
else:
d[t.tag] = text
return d
# Parse the Maven pom.xml.
pom = etree_to_dict(etree.parse('../../../pom.xml').getroot())['project']
# Set the basic project information.
project = pom['name']
project_short = pom['name']
copyright = str(date.today().year) + ', ' + pom['organization']['name']
# Set the project version and release.
# Use the last known stable release if the current version ends with '-SNAPSHOT'.
if re.match(r'^.*-SNAPSHOT$', pom['version']):
release = '0.21.3.Final'
version = '0.21'
else:
release = pom['version']
version = re.match(r'^[0-9]+\.[0-9]+', pom['version']).group(0)
# Define some useful global substitutions.
rst_epilog = '\n'
rst_epilog += '.. |baseurl| replace:: http://line.github.io/armeria/\n'
rst_epilog += '.. |jetty_alpnAgent_version| replace:: ' + pom['properties']['jetty.alpnAgent.version'] + '\n'
rst_epilog += '.. |oss_parent_version| replace:: ' + pom['parent']['version'] + '\n'
rst_epilog += '.. |logback_version| replace:: ' + pom['properties']['logback.version'] + '\n'
rst_epilog += '.. |slf4j_version| replace:: ' + pom['properties']['slf4j.version'] + '\n'
rst_epilog += '.. |tomcat_version| replace:: ' + pom['properties']['tomcat.version'] + '\n'
rst_epilog += '\n'
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc']
templates_path = ['_templates']
source_suffix = '.rst'
source_encoding = 'utf-8-sig'
master_doc = 'index'
exclude_trees = ['.build']
add_function_parentheses = True
pygments_style = 'tango'
master_doc = 'index'
sys.path.append(os.path.abspath('_themes'))
html_theme = 'sphinx_rtd_theme'
html_theme_path = ['_themes']
html_short_title = project_short
html_static_path = ['_static']
html_use_smartypants = True
html_use_index = True
html_show_sourcelink = False
htmlhelp_basename = pom['artifactId']
| Python | 0 |
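
The version logic in this conf.py is easy to isolate. A minimal sketch, where the `0.21.4.Final`/`0.21` fallbacks simply mirror the hard-coded values above:

import re

def split_version(pom_version, fallback=('0.21.4.Final', '0.21')):
    # Snapshot builds fall back to the last stable release; otherwise the
    # short version is the leading major.minor of the pom version.
    if re.match(r'^.*-SNAPSHOT$', pom_version):
        return fallback
    return pom_version, re.match(r'^[0-9]+\.[0-9]+', pom_version).group(0)

print(split_version('0.22.0.Final'))     # ('0.22.0.Final', '0.22')
print(split_version('0.22.0-SNAPSHOT'))  # ('0.21.4.Final', '0.21')
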
7965ce3036f98a9b880f19f688e7e282644e63cf | remove server_name | app/main.py | app/main.py | from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def show_about():
return render_template('aboutme.html')
if __name__ == '__main__':
app.run() | from flask import Flask, render_template
app = Flask(__name__)
app.config['DEBUG'] = True
app.config['SERVER_NAME'] = "vcaen.com"
@app.route('/')
def show_about():
return render_template('aboutme.html')
if __name__ == '__main__':
app.run() | Python | 0.000008 |
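
Why removing SERVER_NAME matters: when it is set, Flask's router only matches requests whose Host header equals that value, so a local run on 127.0.0.1:5000 answers 404 for every route. A minimal sketch of the fixed setup:

from flask import Flask

app = Flask(__name__)
# app.config['SERVER_NAME'] = 'vcaen.com'  # would make localhost requests 404

@app.route('/')
def index():
    return 'ok'

if __name__ == '__main__':
    app.run()  # reachable on localhost again without the SERVER_NAME pin
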
a1d9312e1ac6f66aaf558652d890ac2a6bd67e40 | Add parent so we can track versions. | backend/loader/model/datafile.py | backend/loader/model/datafile.py | from dataitem import DataItem
class DataFile(DataItem):
def __init__(self, name, access, owner):
super(DataFile, self).__init__(name, access, owner, "datafile")
self.checksum = ""
self.size = 0
self.location = ""
self.mediatype = ""
self.conditions = []
self.text = ""
self.metatags = []
self.datadirs = []
self.parent = ""
| from dataitem import DataItem
class DataFile(DataItem):
def __init__(self, name, access, owner):
super(DataFile, self).__init__(name, access, owner, "datafile")
self.checksum = ""
self.size = 0
self.location = ""
self.mediatype = ""
self.conditions = []
self.text = ""
self.metatags = []
self.datadirs = []
| Python | 0 |
424b50960e7ca42c61ccc98864f9876e9688dcd4 | remove empty elements | example/models.py | example/models.py | from django.db import models
class Cake(models.Model):
name = models.CharField(max_length=100)
description = models.TextField()
class Meta:
verbose_name = 'Cake'
verbose_name_plural = 'Cakes'
def __unicode__(self):
return unicode('{}'.format(self.name))
def get_summary_description(self):
return filter(None, (
self.name,
self.description,
))
class Coffee(models.Model):
name = models.CharField(max_length=100)
rating = models.IntegerField()
class Meta:
verbose_name = 'Coffee'
verbose_name_plural = 'Coffees'
def __unicode__(self):
return unicode('{}'.format(self.name))
def get_summary_description(self):
return filter(None, (
self.name,
self.rating,
))
| from django.db import models
class Cake(models.Model):
name = models.CharField(max_length=100)
description = models.TextField()
class Meta:
verbose_name = 'Cake'
verbose_name_plural = 'Cakes'
def __unicode__(self):
return unicode('{}'.format(self.name))
def get_summary_description(self):
return self.name, self.description
class Coffee(models.Model):
name = models.CharField(max_length=100)
rating = models.IntegerField()
class Meta:
verbose_name = 'Coffee'
verbose_name_plural = 'Coffees'
def __unicode__(self):
return unicode('{}'.format(self.name))
def get_summary_description(self):
return self.name, self.rating
| Python | 0.319064 |
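
The `filter(None, ...)` idiom introduced above drops falsy members (empty strings, 0, None) and keeps the rest. A minimal illustration, wrapped in `list()` so it prints the same on Python 2 and 3:

values = ('Espresso', '', None, 0, 'strong')
print(list(filter(None, values)))  # ['Espresso', 'strong']
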
bcc7692e14b7b695f08dfb39aaccf3dbfa67d857 | Add safeGetInt to BMConfigParser | src/configparser.py | src/configparser.py | import ConfigParser
from singleton import Singleton
@Singleton
class BMConfigParser(ConfigParser.SafeConfigParser):
def set(self, section, option, value=None):
if self._optcre is self.OPTCRE or value:
if not isinstance(value, basestring):
raise TypeError("option values must be strings")
return ConfigParser.ConfigParser.set(self, section, option, value)
def get(self, section, option, raw=False, vars=None):
if section == "bitmessagesettings" and option == "timeformat":
try:
return ConfigParser.ConfigParser.get(self, section, option, raw, vars)
except ConfigParser.InterpolationError:
return ConfigParser.ConfigParser.get(self, section, option, True, vars)
return ConfigParser.ConfigParser.get(self, section, option, True, vars)
def safeGetBoolean(self, section, field):
if self.has_option(section, field):
try:
return self.getboolean(section, field)
except ValueError:
return False
return False
def safeGetInt(self, section, field):
if self.has_option(section, field):
try:
return self.getint(section, field)
except ValueError:
return 0
return 0
def safeGet(self, section, option, default = None):
if self.has_option(section, option):
return self.get(section, option)
else:
return default
def items(self, section, raw=False, vars=None):
return ConfigParser.ConfigParser.items(self, section, True, vars)
| import ConfigParser
from singleton import Singleton
@Singleton
class BMConfigParser(ConfigParser.SafeConfigParser):
def set(self, section, option, value=None):
if self._optcre is self.OPTCRE or value:
if not isinstance(value, basestring):
raise TypeError("option values must be strings")
return ConfigParser.ConfigParser.set(self, section, option, value)
def get(self, section, option, raw=False, vars=None):
if section == "bitmessagesettings" and option == "timeformat":
try:
return ConfigParser.ConfigParser.get(self, section, option, raw, vars)
except ConfigParser.InterpolationError:
return ConfigParser.ConfigParser.get(self, section, option, True, vars)
return ConfigParser.ConfigParser.get(self, section, option, True, vars)
def safeGetBoolean(self, section, field):
if self.has_option(section, field):
try:
return self.getboolean(section, field)
except ValueError:
return False
return False
def safeGet(self, section, option, default = None):
if self.has_option(section, option):
return self.get(section, option)
else:
return default
def items(self, section, raw=False, vars=None):
return ConfigParser.ConfigParser.items(self, section, True, vars)
| Python | 0.000001 |
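
A usage sketch of the pattern safeGetInt follows, written as a standalone Python 2 function so it runs without the singleton class; the section and option names are illustrative:

import ConfigParser

def safe_get_int(parser, section, field, default=0):
    if parser.has_option(section, field):
        try:
            return parser.getint(section, field)
        except ValueError:  # option present but not an integer
            return default
    return default          # option missing entirely

cfg = ConfigParser.SafeConfigParser()
cfg.add_section('bitmessagesettings')
cfg.set('bitmessagesettings', 'port', 'not-a-number')
print(safe_get_int(cfg, 'bitmessagesettings', 'port'))  # 0 instead of raising
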
5bce4bb123a086dd116abbd0932d34fa170a83cd | Update view to point to corrected template path | search/views.py | search/views.py | # GNU MediaGoblin -- federated, autonomous media hosting
# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from mediagoblin import mg_globals
from mediagoblin.db.base import Session
from mediagoblin.db.models import (MediaEntry, MediaTag, Collection,
CollectionItem, User)
from mediagoblin.decorators import uses_pagination
from mediagoblin.tools.response import render_to_response
from mediagoblin.tools.pagination import Pagination
from mediagoblin.plugins.search import forms as search_forms
from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
from mediagoblin.meddleware.csrf import csrf_exempt
from sqlalchemy import and_, or_
import logging
_log = logging.getLogger(__name__)
@csrf_exempt
@uses_pagination
def search_results_view(request, page):
media_entries = None
pagination = None
query = None
form = search_forms.SearchForm(
request.form)
#if request.method == 'GET':
if request.GET.get('query') != None:
if request.GET.get('query') != '':
query = '%' + request.GET.get('query') + '%'
#cursor = MediaEntry.query.filter(MediaEntry.uploader==1).\
matches = MediaEntry.query.filter(
and_(
MediaEntry.state == u'processed',
or_(
MediaEntry.title.ilike(query),
MediaEntry.description.ilike(query)
)
)).order_by(MediaEntry.created.desc())
#_log.info(matches)
pagination = Pagination(page, matches)
media_entries = pagination()
return render_to_response(
request,
'mediagoblin/plugins/search/results.html',
{'media_entries': media_entries,
'pagination': pagination,
'form': form})
| # GNU MediaGoblin -- federated, autonomous media hosting
# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from mediagoblin import mg_globals
from mediagoblin.db.base import Session
from mediagoblin.db.models import (MediaEntry, MediaTag, Collection,
CollectionItem, User)
from mediagoblin.decorators import uses_pagination
from mediagoblin.tools.response import render_to_response
from mediagoblin.tools.pagination import Pagination
from mediagoblin.plugins.search import forms as search_forms
from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
from mediagoblin.meddleware.csrf import csrf_exempt
from sqlalchemy import and_, or_
import logging
_log = logging.getLogger(__name__)
@csrf_exempt
@uses_pagination
def search_results_view(request, page):
media_entries = None
pagination = None
query = None
form = search_forms.SearchForm(
request.form)
#if request.method == 'GET':
if request.GET.get('query') != None:
if request.GET.get('query') != '':
query = '%' + request.GET.get('query') + '%'
#cursor = MediaEntry.query.filter(MediaEntry.uploader==1).\
matches = MediaEntry.query.filter(
and_(
MediaEntry.state == u'processed',
or_(
MediaEntry.title.ilike(query),
MediaEntry.description.ilike(query)
)
)).order_by(MediaEntry.created.desc())
#_log.info(matches)
pagination = Pagination(page, matches)
media_entries = pagination()
return render_to_response(
request, 'search/results.html',
{'media_entries': media_entries,
'pagination': pagination,
'form': form})
| Python | 0 |
d77777c2a011e77b284748d1dfbd3cd31e6c8565 | make verifier regexing more robust | c_test_environment/verifier.py | c_test_environment/verifier.py | import re
import sys
def verify(testout, expected, ordered):
numpat = re.compile(r'(\d+)')
tuplepat = re.compile(r'Materialized')
test = ({}, [])
expect = ({}, [])
def addTuple(tc, t):
if ordered:
tcl = tc[1]
tcl.append(t)
else:
tcs = tc[0]
if t not in tcs:
tcs[t] = 1
else:
tcs[t]+=1
with open(testout, 'r') as file:
for line in file.readlines():
m = tuplepat.search(line)
if m:
tlist = []
for number in numpat.finditer(line, m.end()):
tlist.append(int(number.group(0)))
t = tuple(tlist)
addTuple(test, t)
with open(expected, 'r') as file:
for line in file.readlines():
tlist = []
for number in numpat.finditer(line):
tlist.append(int(number.group(0)))
t = tuple(tlist)
addTuple(expect, t)
print test
print expect
assert test == expect, "\n test: %s !=\n expect:%s" % (test, expect)
print "pass"
if __name__ == '__main__':
testout=sys.argv[1]
expected=sys.argv[2]
ordered = False
if len(sys.argv) > 3:
if sys.argv[3] == 'o':
ordered = True
verify(testout, expected, ordered)
| import re
import sys
def verify(testout, expected, ordered):
test = ({}, [])
expect = ({}, [])
def addTuple(tc, t):
if ordered:
tcl = tc[1]
tcl.append(t)
else:
tcs = tc[0]
if t not in tcs:
tcs[t] = 1
else:
tcs[t]+=1
with open(testout, 'r') as file:
for line in file.readlines():
if re.match(r'Materialized', line):
tlist = []
for number in re.finditer(r'(\d+)', line):
tlist.append(int(number.group(0)))
t = tuple(tlist)
addTuple(test, t)
with open(expected, 'r') as file:
for line in file.readlines():
tlist = []
for number in re.finditer(r'(\d+)', line):
tlist.append(int(number.group(0)))
t = tuple(tlist)
addTuple(expect, t)
print test
print expect
assert test == expect, "\n test: %s !=\n expect:%s" % (test, expect)
print "pass"
if __name__ == '__main__':
testout=sys.argv[1]
expected=sys.argv[2]
ordered = False
if len(sys.argv) > 3:
if sys.argv[3] == 'o':
ordered = True
verify(testout, expected, ordered)
| Python | 0.000004 |
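
The robustness gain above comes from precompiled patterns: a compiled pattern's finditer accepts a start position, so digit matching can begin right after the "Materialized" marker instead of anywhere in the line (the module-level re.finditer takes no such argument). A minimal sketch:

import re

NUM = re.compile(r'(\d+)')            # compiled once, reused per line
MARK = re.compile(r'Materialized')

line = 'run 7: Materialized (1, 20, 300)'
m = MARK.search(line)
if m:
    # scan starts at m.end(), so the leading "7" is ignored
    print([int(g.group(0)) for g in NUM.finditer(line, m.end())])  # [1, 20, 300]
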
82641a936b2215480e29896cdafed3872c2928c6 | Remove xfails for newly passing tests in test_recipe_integration.py | test/test_recipes_integration.py | test/test_recipes_integration.py | import pytest
import os
import subprocess
import json
# Each test with recipe and appropriate parameters in one line
# Using bracket annotation to set it optional (xfail)
TEST_CASES = [
"activedata_usage",
"backout_rate",
["code_coverage --path caps --rev 45715ece25fc"],
"code_coverage_by_suite --path caps --rev 45715ece25fc",
"config_durations",
"files_with_coverage",
"intermittent_tests",
"intermittent_test_data",
["raw_coverage --path caps --rev 45715ece25fc"],
"test_durations",
["tests_config_times -t test-windows10-64/opt-awsy-e10s"],
"tests_in_duration",
"try_efficiency",
"try_usage",
["try_users"]
]
def load_tests(tests):
return [pytest.param(test[0], marks=pytest.mark.xfail)
if isinstance(test, list)
else test
for test in tests]
@pytest.mark.skipif(os.getenv("TRAVIS_EVENT_TYPE") != "cron", reason="Not run by cron job")
@pytest.mark.parametrize("recipe", load_tests(TEST_CASES))
def test_recipe_integration(recipe):
command = ['adr', '--format', 'json']
command.extend(recipe.split(" "))
data = subprocess.check_output(command, stderr=subprocess.STDOUT)
result = json.loads(data)
assert result
assert len(result)
| import pytest
import os
import subprocess
import json
# Each test with recipe and appropriate parameters in one line
# Using bracket annotation to set it optional (xfail)
TEST_CASES = [
"activedata_usage",
["backout_rate"],
["code_coverage --path caps --rev 45715ece25fc"],
"code_coverage_by_suite --path caps --rev 45715ece25fc",
"config_durations",
"files_with_coverage",
["intermittent_tests"],
["intermittent_test_data"],
["raw_coverage --path caps --rev 45715ece25fc"],
"test_durations",
["tests_config_times -t test-windows10-64/opt-awsy-e10s"],
["tests_in_duration"],
["try_efficiency"],
["try_usage"],
["try_users"]
]
def load_tests(tests):
return [pytest.param(test[0], marks=pytest.mark.xfail)
if isinstance(test, list)
else test
for test in tests]
@pytest.mark.skipif(os.getenv("TRAVIS_EVENT_TYPE") != "cron", reason="Not run by cron job")
@pytest.mark.parametrize("recipe", load_tests(TEST_CASES))
def test_recipe_integration(recipe):
command = ['adr', '--format', 'json']
command.extend(recipe.split(" "))
data = subprocess.check_output(command, stderr=subprocess.STDOUT)
result = json.loads(data)
assert result
assert len(result)
| Python | 0.000004 |
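
A sketch of the bracket convention this test module uses: wrapping an entry in a list flags it, and the loader turns it into `pytest.param(..., marks=pytest.mark.xfail)` so the case may fail without breaking the run. The case names here are made up:

import pytest

CASES = ["passes", ["known_failure"]]

def load(cases):
    return [pytest.param(c[0], marks=pytest.mark.xfail) if isinstance(c, list)
            else c
            for c in cases]

@pytest.mark.parametrize("case", load(CASES))
def test_case(case):
    assert case != "known_failure"  # the xfail entry is allowed to fail
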
4be67b6f46c5f4a7f8a2b89199cff2373dcc7a43 | Fix casing | ca_qc_trois_rivieres/people.py | ca_qc_trois_rivieres/people.py | # coding: utf-8
from utils import CanadianScraper, CanadianPerson as Person
import re
COUNCIL_PAGE = 'http://www.v3r.net/a-propos-de-la-ville/vie-democratique/conseil-municipal/conseillers-municipaux'
MAYOR_URL = 'http://www.v3r.net/a-propos-de-la-ville/vie-democratique/mairie'
class TroisRivieresPersonScraper(CanadianScraper):
def scrape(self):
# mayor first, can't find email
page = self.lxmlize(MAYOR_URL)
photo_url = page.xpath('//img[contains(@alt, "Maire")]//@src')[0]
name = page.xpath('//img/@alt[contains(., "Maire")]')[0]
assert len(name), "missing mayor's name"
name = re.sub(r'Maire', '', name, flags=re.I).strip()
p = Person(primary_org='legislature', name=name, district="Trois-Rivières", role="Maire",
image=photo_url)
p.add_source(MAYOR_URL)
yield p
page = self.lxmlize(COUNCIL_PAGE)
members = page.xpath('//div[@class="photos_conseillers"]//figure')
assert len(members), 'No councillors found'
for member in members:
photo_url = member.xpath('.//a//img/@src')[0]
url = member.xpath('.//figcaption//a/@href')[0]
email = self.lxmlize(url).xpath(
'//div[@class="content-page"]//a[starts-with(@href, "mailto:")]/@href')[0]
email = re.sub('^mailto:', '', email)
name, district = map(
lambda x: x.strip(),
member.xpath('.//figcaption//text()'))
if district.lower() in ('des estacades', 'des plateaux',
'des terrasses', 'du sanctuaire'):
district = re.sub('\A(?:de(?: la)?|des|du) ', lambda match: match.group(0).lower(), district, flags=re.I)
else:
district = re.sub('\A(?:de(?: la)?|des|du) ', '', district, flags=re.I)
p = Person(primary_org='legislature', name=name, district=district, role='Conseiller',
image=photo_url)
p.add_source(COUNCIL_PAGE)
p.add_source(url)
p.add_contact('email', email)
yield p
| # coding: utf-8
from utils import CanadianScraper, CanadianPerson as Person
import re
COUNCIL_PAGE = 'http://www.v3r.net/a-propos-de-la-ville/vie-democratique/conseil-municipal/conseillers-municipaux'
MAYOR_URL = 'http://www.v3r.net/a-propos-de-la-ville/vie-democratique/mairie'
class TroisRivieresPersonScraper(CanadianScraper):
def scrape(self):
# mayor first, can't find email
page = self.lxmlize(MAYOR_URL)
photo_url = page.xpath('//img[contains(@alt, "Maire")]//@src')[0]
name = page.xpath('//img/@alt[contains(., "Maire")]')[0]
assert len(name), "missing mayor's name"
name = re.sub(r'Maire', '', name, flags=re.I).strip()
p = Person(primary_org='legislature', name=name, district="Trois-Rivières", role="Maire",
image=photo_url)
p.add_source(MAYOR_URL)
yield p
page = self.lxmlize(COUNCIL_PAGE)
members = page.xpath('//div[@class="photos_conseillers"]//figure')
assert len(members), 'No councillors found'
for member in members:
photo_url = member.xpath('.//a//img/@src')[0]
url = member.xpath('.//figcaption//a/@href')[0]
email = self.lxmlize(url).xpath(
'//div[@class="content-page"]//a[starts-with(@href, "mailto:")]/@href')[0]
email = re.sub('^mailto:', '', email)
name, district = map(
lambda x: x.strip(),
member.xpath('.//figcaption//text()'))
if district.lower() not in ('des estacades', 'des plateaux',
'des terrasses', 'du sanctuaire'):
district = re.sub('\A(?:de(?: la)?|des|du) ', '', district, flags=re.I)
p = Person(primary_org='legislature', name=name, district=district, role='Conseiller',
image=photo_url)
p.add_source(COUNCIL_PAGE)
p.add_source(url)
p.add_contact('email', email)
yield p
| Python | 0.000006 |
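
The casing fix above relies on `re.sub` accepting a callable replacement: whitelisted districts keep their leading French particle but lowercase it, while all others have it stripped. A minimal sketch with illustrative district names:

import re

PARTICLE = r'\A(?:de(?: la)?|des|du) '

def normalize(district, keep_particle):
    if keep_particle:
        # the callable receives the match object and lowercases just the particle
        return re.sub(PARTICLE, lambda m: m.group(0).lower(), district, flags=re.I)
    return re.sub(PARTICLE, '', district, flags=re.I)

print(normalize('Des Plateaux', True))   # des Plateaux
print(normalize('Des Forges', False))    # Forges
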
5ecab61cd66b821d70e73006f60d8f7908bfb403 | Fix bug in writePrecisePathToSnapshot not backtracking properly to the initial structure | AdaptivePELE/analysis/writePrecisePathToSnapshot.py | AdaptivePELE/analysis/writePrecisePathToSnapshot.py | """
from .game_mdp import GameMDP
from ..utils import utility
class FixedGameMDP(GameMDP):
def __init__(self, game, opp_player, opp_idx):
'''
opp_player: the opponent player
opp_idx: the idx of the opponent player in the game
'''
self._game = game
self._opp_player = opp_player
self._opp_idx = opp_idx
self._agent_idx = opp_idx ^ 1
self._states = {}
def reward(self, game, move, next_game):
return utility(next_game, self._agent_idx) if next_game.is_over() else 0
def start_state(self):
new_game = self._game.copy()
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return new_game
def transitions(self, game, move):
if game.is_over():
return []
new_game = game.copy().make_move(move)
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return [(new_game, 1.0)]
| from .mdp import MDP
from .game_mdp import GameMDP
from ..utils import utility
class FixedGameMDP(GameMDP):
def __init__(self, game, opp_player, opp_idx):
'''
opp_player: the opponent player
opp_idx: the idx of the opponent player in the game
'''
self._game = game
self._opp_player = opp_player
self._opp_idx = opp_idx
self._agent_idx = opp_idx ^ 1
self._states = {}
#######
# MDP #
#######
def reward(self, game, move, next_game):
return utility(next_game, self._agent_idx) if next_game.is_over() else 0
def start_state(self):
new_game = self._game.copy()
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return new_game
def transitions(self, game, move):
if game.is_over():
return []
new_game = game.copy().make_move(move)
if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
chosen_move = self._opp_player.choose_move(new_game)
new_game.make_move(chosen_move)
return [(new_game, 1.0)]
| Python | 0 |
b8c3ad8c9eb4cdf2618839b425b8413181a443ff | Fix bug in writePrecisePathToSnapshot not bactracking prperly to the initial structure | AdaptivePELE/analysis/writePrecisePathToSnapshot.py | AdaptivePELE/analysis/writePrecisePathToSnapshot.py | """
Recreate the trajectory fragments to the led to the discovery of a snapshot,
specified by the tuple (epoch, trajectory, snapshot) and write as a pdb file
"""
import os
import sys
import argparse
import glob
import itertools
from AdaptivePELE.utilities import utilities
def parseArguments():
"""
Parse the command-line options
:returns: :py:class:`.Clustering`, int, int, int, str -- Clustering
object, number of trajectory, number of snapshot, number of epoch,
output path where to write the files
"""
desc = "Write the information related to the conformation network to file\n"
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("clusteringObject", type=str, help="Path to the clustering object")
parser.add_argument("trajectory", type=int, help="Trajectory number")
parser.add_argument("snapshot", type=int, help="Snapshot to select (in accepted steps)")
parser.add_argument("epoch", type=str, help="Path to the epoch to search the snapshot")
parser.add_argument("-o", type=str, default=None, help="Output path where to write the files")
args = parser.parse_args()
return args.clusteringObject, args.trajectory, args.snapshot, args.epoch, args.o
if __name__ == "__main__":
clusteringObject, trajectory, snapshot, epoch, outputPath = parseArguments()
if outputPath is not None:
outputPath = os.path.join(outputPath, "")
if not os.path.exists(outputPath):
os.makedirs(outputPath)
else:
outputPath = ""
sys.stderr.write("Reading clustering object...\n")
cl = utilities.readClusteringObject(clusteringObject)
pathway = []
# Strip out trailing backslash if present
pathPrefix, epoch = os.path.split(epoch.rstrip("/"))
sys.stderr.write("Creating pathway...\n")
while True:
filename = glob.glob(os.path.join(pathPrefix, epoch, "*traj*_%d.pdb" % trajectory))
snapshots = utilities.getSnapshots(filename[0])
snapshots = snapshots[:snapshot+1]
pathway.insert(0, snapshots)
if epoch == '0':
# Once we get to epoch 0, we just need to append the trajectory
# where the cluster was found and we can break out of the loop
break
procMapping = open(os.path.join(pathPrefix, epoch, "processorMapping.txt")).read().rstrip().split(':')
epoch, trajectory, snapshot = map(int, procMapping[trajectory-1][1:-1].split(','))
epoch = str(epoch)
sys.stderr.write("Writing pathway...\n")
with open(outputPath+"pathway.pdb", "a") as f:
f.write("ENDMDL\n".join(itertools.chain.from_iterable(pathway)))
| """
Recreate the trajectory fragments to the led to the discovery of a snapshot,
specified by the tuple (epoch, trajectory, snapshot) and write as a pdb file
"""
import os
import sys
import argparse
import glob
import itertools
from AdaptivePELE.utilities import utilities
def parseArguments():
"""
Parse the command-line options
:returns: :py:class:`.Clustering`, int, int, int, str -- Clustering
object, number of trajectory, number of snapshot, number of epoch,
output path where to write the files
"""
desc = "Write the information related to the conformation network to file\n"
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("clusteringObject", type=str, help="Path to the clustering object")
parser.add_argument("trajectory", type=int, help="Trajectory number")
parser.add_argument("snapshot", type=int, help="Snapshot to select (in accepted steps)")
parser.add_argument("epoch", type=str, help="Path to the epoch to search the snapshot")
parser.add_argument("-o", type=str, default=None, help="Output path where to write the files")
args = parser.parse_args()
return args.clusteringObject, args.trajectory, args.snapshot, args.epoch, args.o
if __name__ == "__main__":
clusteringObject, trajectory, snapshot, epoch, outputPath = parseArguments()
if outputPath is not None:
outputPath = os.path.join(outputPath, "")
if not os.path.exists(outputPath):
os.makedirs(outputPath)
else:
outputPath = ""
sys.stderr.write("Reading clustering object...\n")
cl = utilities.readClusteringObject(clusteringObject)
pathway = []
# Strip out trailing backslash if present
pathPrefix, epoch = os.path.split(epoch.rstrip("/"))
sys.stderr.write("Creating pathway...\n")
while epoch != "0":
filename = glob.glob(os.path.join(pathPrefix,epoch,"*traj*_%d.pdb" % trajectory))
snapshots = utilities.getSnapshots(filename[0])
snapshots = snapshots[:snapshot+1]
pathway.insert(0, snapshots)
procMapping = open(os.path.join(pathPrefix, epoch, "processorMapping.txt")).read().rstrip().split(':')
epoch, trajectory, snapshot = map(int, procMapping[trajectory-1][1:-1].split(','))
epoch = str(epoch)
sys.stderr.write("Writing pathway...\n")
with open(outputPath+"pathway.pdb", "a") as f:
f.write("ENDMDL\n".join(itertools.chain.from_iterable(pathway)))
| Python | 0 |
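
The bug fixed above is a loop-ordering one: the old `while epoch != "0"` exited before the epoch-0 fragment was ever inserted, so the pathway never reached the initial structure. The new `while True` form inserts first and breaks only after epoch 0 has been handled. A schematic with a made-up parent map instead of processorMapping files:

def walk(epoch, parent_of, pathway):
    while True:
        pathway.insert(0, "fragment-from-epoch-%s" % epoch)  # insert BEFORE testing
        if epoch == "0":
            break                 # epoch 0 fragment already included
        epoch = parent_of[epoch]

path = []
walk("2", {"2": "1", "1": "0"}, path)
print(path)  # ['fragment-from-epoch-0', 'fragment-from-epoch-1', 'fragment-from-epoch-2']
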
1549510fd9371818cff6644984896a5a9060cb36 | Fix print statements with python3 syntax. | benchmarks/TSP/compare_to_BKS.py | benchmarks/TSP/compare_to_BKS.py | # -*- coding: utf-8 -*-
import json, sys, os
import numpy as np
# Compare a set of computed solutions to best known solutions on the
# same problems.
def s_round(v, d):
if d == 0:
return str(int(v))
else:
return str(round(v, d))
def log_comparisons(BKS, files):
print(','.join(["Instance", "Jobs", "Vehicles", "Best known cost", "Solution cost", "Gap (%)", "Computing time (ms)"]))
jobs = []
gaps = []
computing_times = []
for f in files:
instance = f[0:f.rfind("_sol.json")]
instance = instance[instance.rfind('/') + 1:]
if instance not in BKS:
continue
indicators = BKS[instance]
BK_cost = indicators['best_known_cost']
nb_job = indicators['jobs']
jobs.append(nb_job)
line = [
instance,
nb_job,
indicators['vehicles'],
BK_cost
]
with open(f, 'r') as sol_file:
solution = json.load(sol_file)
if solution['code'] != 0:
continue
cost = solution['summary']['cost']
line.append(cost)
gap = 100 * (float(cost) / BK_cost - 1)
line.append(round(gap, 2))
gaps.append(gap)
computing_time = solution['summary']['computing_times']['loading'] + solution['summary']['computing_times']['solving']
line.append(computing_time)
computing_times.append(computing_time)
print(','.join(map(lambda x: str(x), line)))
print(',')
print('Average,' + s_round(np.mean(jobs), 1) + ',,,,' + s_round(np.mean(gaps), 2) + ',' + s_round(np.mean(computing_times), 0))
# Percentiles
print(',')
gaps_percentiles = np.percentile(gaps, [0, 10, 25, 50, 75, 90, 100])
ct_percentiles = np.percentile(computing_times, [0, 10, 25, 50, 75, 90, 100])
print(',Gaps,Computing times')
titles = ['Min', 'First decile', 'Lower quartile', 'Median', 'Upper quartile', 'Ninth decile', 'Max']
for i in range(len(titles)):
print(titles[i] + ',' + s_round(gaps_percentiles[i], 2) + ',' + s_round(ct_percentiles[i], 0))
if __name__ == "__main__":
# First argument if the best known solution file.
with open(sys.argv[1], 'r') as sol_file:
bks = json.load(sol_file)
# Remaining arguments are computed solution files to use.
log_comparisons(bks, sys.argv[2:])
| # -*- coding: utf-8 -*-
import json, sys, os
import numpy as np
# Compare a set of computed solutions to best known solutions on the
# same problems.
def s_round(v, d):
if d == 0:
return str(int(v))
else:
return str(round(v, d))
def log_comparisons(BKS, files):
print ','.join(["Instance", "Jobs", "Vehicles", "Best known cost", "Solution cost", "Gap (%)", "Computing time (ms)"])
jobs = []
gaps = []
computing_times = []
for f in files:
instance = f[0:f.rfind("_sol.json")]
instance = instance[instance.rfind('/') + 1:]
if instance not in BKS:
continue
indicators = BKS[instance]
BK_cost = indicators['best_known_cost']
nb_job = indicators['jobs']
jobs.append(nb_job)
line = [
instance,
nb_job,
indicators['vehicles'],
BK_cost
]
with open(f, 'r') as sol_file:
solution = json.load(sol_file)
if solution['code'] != 0:
continue
cost = solution['summary']['cost']
line.append(cost)
gap = 100 * (float(cost) / BK_cost - 1)
line.append(round(gap, 2))
gaps.append(gap)
computing_time = solution['summary']['computing_times']['loading'] + solution['summary']['computing_times']['solving']
line.append(computing_time)
computing_times.append(computing_time)
print ','.join(map(lambda x: str(x), line))
print ','
print 'Average,' + s_round(np.mean(jobs), 1) + ',,,,' + s_round(np.mean(gaps), 2) + ',' + s_round(np.mean(computing_times), 0)
# Percentiles
print ','
gaps_percentiles = np.percentile(gaps, [0, 10, 25, 50, 75, 90, 100])
ct_percentiles = np.percentile(computing_times, [0, 10, 25, 50, 75, 90, 100])
print ',Gaps,Computing times'
titles = ['Min', 'First decile', 'Lower quartile', 'Median', 'Upper quartile', 'Ninth decile', 'Max']
for i in range(len(titles)):
print titles[i] + ',' + s_round(gaps_percentiles[i], 2) + ',' + s_round(ct_percentiles[i], 0)
if __name__ == "__main__":
# First argument if the best known solution file.
with open(sys.argv[1], 'r') as sol_file:
bks = json.load(sol_file)
# Remaining arguments are computed solution files to use.
log_comparisons(bks, sys.argv[2:])
| Python | 0.000038 |
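
For scripts like the one above that must keep running on Python 2 during a migration, the usual companion to converting `print x` into `print(x)` is the future import, which makes the statement form an error on Python 2 as well. A minimal sketch:

from __future__ import print_function  # print is a function on Python 2 too

gap = 3.14159
print(','.join(["Instance", "Gap (%)"]))  # one syntax for both interpreters
print('Average,' + str(round(gap, 2)))
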
63bd0d8905ea9392e56f501381c054ba3a4ed1a7 | Update __init__.py | chainer/optimizers/__init__.py | chainer/optimizers/__init__.py | -# import classes and functions
from chainer.optimizers.ada_delta import AdaDelta # NOQA
from chainer.optimizers.ada_grad import AdaGrad # NOQA
from chainer.optimizers.adam import Adam # NOQA
from chainer.optimizers.momentum_sgd import MomentumSGD # NOQA
from chainer.optimizers.msvag import MSVAG # NOQA
from chainer.optimizers.nesterov_ag import NesterovAG # NOQA
from chainer.optimizers.rmsprop import RMSprop # NOQA
from chainer.optimizers.rmsprop_graves import RMSpropGraves # NOQA
from chainer.optimizers.sgd import SGD # NOQA
from chainer.optimizers.smorms3 import SMORMS3 # NOQA
| from chainer.optimizers.ada_delta import AdaDelta # NOQA
from chainer.optimizers.ada_grad import AdaGrad # NOQA
from chainer.optimizers.adam import Adam # NOQA
from chainer.optimizers.momentum_sgd import MomentumSGD # NOQA
from chainer.optimizers.msvag import MSVAG # NOQA
from chainer.optimizers.nesterov_ag import NesterovAG # NOQA
from chainer.optimizers.rmsprop import RMSprop # NOQA
from chainer.optimizers.rmsprop_graves import RMSpropGraves # NOQA
from chainer.optimizers.sgd import SGD # NOQA
from chainer.optimizers.smorms3 import SMORMS3 # NOQA
| Python | 0.000072 |
7a7661bd03c947212ee46ca598cae5cd316757c1 | Fix flake8 | chainercv/datasets/__init__.py | chainercv/datasets/__init__.py | from chainercv.datasets.camvid.camvid_dataset import camvid_ignore_label_color # NOQA
from chainercv.datasets.camvid.camvid_dataset import camvid_label_colors # NOQA
from chainercv.datasets.camvid.camvid_dataset import camvid_label_names # NOQA
from chainercv.datasets.camvid.camvid_dataset import CamVidDataset # NOQA
from chainercv.datasets.cityscapes.cityscapes_semantic_segmentation_dataset import CityscapesSemanticSegmentationDataset # NOQA
from chainercv.datasets.cityscapes.cityscapes_utils import cityscapes_label_colors # NOQA
from chainercv.datasets.cityscapes.cityscapes_utils import cityscapes_label_names # NOQA
from chainercv.datasets.cityscapes.cityscapes_utils import cityscapes_labels # NOQA
from chainercv.datasets.cub.cub_keypoint_dataset import CUBKeypointDataset # NOQA
from chainercv.datasets.cub.cub_label_dataset import CUBLabelDataset # NOQA
from chainercv.datasets.cub.cub_utils import cub_label_names # NOQA
from chainercv.datasets.directory_parsing_classification_dataset import directory_parsing_label_names # NOQA
from chainercv.datasets.directory_parsing_classification_dataset import DirectoryParsingClassificationDataset # NOQA
from chainercv.datasets.online_products.online_products_dataset import OnlineProductsDataset # NOQA
from chainercv.datasets.transform_dataset import TransformDataset # NOQA
from chainercv.datasets.voc.voc_detection_dataset import VOCDetectionDataset # NOQA
from chainercv.datasets.voc.voc_semantic_segmentation_dataset import VOCSemanticSegmentationDataset # NOQA
from chainercv.datasets.voc.voc_utils import voc_detection_label_names # NOQA
from chainercv.datasets.voc.voc_utils import voc_semantic_segmentation_ignore_label_color # NOQA
from chainercv.datasets.voc.voc_utils import voc_semantic_segmentation_label_colors # NOQA
from chainercv.datasets.voc.voc_utils import voc_semantic_segmentation_label_names # NOQA
| from chainercv.datasets.camvid.camvid_dataset import camvid_ignore_label_color # NOQA
from chainercv.datasets.camvid.camvid_dataset import camvid_label_colors # NOQA
from chainercv.datasets.camvid.camvid_dataset import camvid_label_names # NOQA
from chainercv.datasets.camvid.camvid_dataset import CamVidDataset # NOQA
from chainercv.datasets.cityscapes.cityscapes_utils import cityscapes_labels # NOQA
from chainercv.datasets.cityscapes.cityscapes_utils import cityscapes_label_names # NOQA
from chainercv.datasets.cityscapes.cityscapes_utils import cityscapes_label_colors # NOQA
from chainercv.datasets.cityscapes.cityscapes_semantic_segmentation_dataset import CityscapesSemanticSegmentationDataset # NOQA
from chainercv.datasets.cub.cub_keypoint_dataset import CUBKeypointDataset # NOQA
from chainercv.datasets.cub.cub_label_dataset import CUBLabelDataset # NOQA
from chainercv.datasets.cub.cub_utils import cub_label_names # NOQA
from chainercv.datasets.directory_parsing_classification_dataset import directory_parsing_label_names # NOQA
from chainercv.datasets.directory_parsing_classification_dataset import DirectoryParsingClassificationDataset # NOQA
from chainercv.datasets.online_products.online_products_dataset import OnlineProductsDataset # NOQA
from chainercv.datasets.transform_dataset import TransformDataset # NOQA
from chainercv.datasets.voc.voc_detection_dataset import VOCDetectionDataset # NOQA
from chainercv.datasets.voc.voc_semantic_segmentation_dataset import VOCSemanticSegmentationDataset # NOQA
from chainercv.datasets.voc.voc_utils import voc_detection_label_names # NOQA
from chainercv.datasets.voc.voc_utils import voc_semantic_segmentation_ignore_label_color # NOQA
from chainercv.datasets.voc.voc_utils import voc_semantic_segmentation_label_colors # NOQA
from chainercv.datasets.voc.voc_utils import voc_semantic_segmentation_label_names # NOQA
| Python | 0 |
b27398e4dd246d542c0a82ecc35da60911edc9fd | revert to dev version | regionmask/version.py | regionmask/version.py | version = "0.7.0+dev"
| version = "0.7.0"
| Python | 0 |
b318ced455f13477743a6d2d81b3556695b27374 | Make to_factorized_noisy support args | chainerrl/links/noisy_chain.py | chainerrl/links/noisy_chain.py | import chainer
from chainer.links import Linear
from chainerrl.links.noisy_linear import FactorizedNoisyLinear
def to_factorized_noisy(link, *args, **kwargs):
"""Add noisiness to components of given link
Currently this function supports L.Linear (with and without bias)
"""
def func_to_factorized_noisy(link):
if isinstance(link, Linear):
return FactorizedNoisyLinear(link, *args, **kwargs)
else:
return link
_map_links(func_to_factorized_noisy, link)
def _map_links(func, link):
if isinstance(link, chainer.Chain):
children_names = link._children.copy()
for name in children_names:
child = getattr(link, name)
new_child = func(child)
if new_child is child:
_map_links(func, child)
else:
delattr(link, name)
with link.init_scope():
setattr(link, name, func(child))
elif isinstance(link, chainer.ChainList):
children = link._children
for i in range(len(children)):
child = children[i]
new_child = func(child)
if new_child is child:
_map_links(func, child)
else:
# mimic ChainList.add_link
children[i] = func(child)
children[i].name = str(i)
| import chainer
from chainer.links import Linear
from chainerrl.links.noisy_linear import FactorizedNoisyLinear
def to_factorized_noisy(link):
"""Add noisiness to components of given link
Currently this function supports L.Linear (with and without bias)
"""
_map_links(_func_to_factorized_noisy, link)
def _func_to_factorized_noisy(link):
if isinstance(link, Linear):
return FactorizedNoisyLinear(link)
else:
return link
def _map_links(func, link):
if isinstance(link, chainer.Chain):
children_names = link._children.copy()
for name in children_names:
child = getattr(link, name)
new_child = func(child)
if new_child is child:
_map_links(func, child)
else:
delattr(link, name)
with link.init_scope():
setattr(link, name, func(child))
elif isinstance(link, chainer.ChainList):
children = link._children
for i in range(len(children)):
child = children[i]
new_child = func(child)
if new_child is child:
_map_links(func, child)
else:
# mimic ChainList.add_link
children[i] = func(child)
children[i].name = str(i)
| Python | 0.000001 |
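
A usage sketch of the new pass-through: extra arguments handed to `to_factorized_noisy` now reach every `FactorizedNoisyLinear` it creates. The `sigma_scale` keyword is assumed here to be such an argument of chainerrl's FactorizedNoisyLinear:

import chainer.links as L
from chainer import Chain
from chainerrl.links.noisy_chain import to_factorized_noisy

class QNet(Chain):
    def __init__(self):
        super(QNet, self).__init__()
        with self.init_scope():
            self.l1 = L.Linear(4, 32)
            self.l2 = L.Linear(32, 2)

model = QNet()
to_factorized_noisy(model, sigma_scale=0.5)  # kwargs forwarded to each wrapped layer
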
af9006169d6f537d26f58926873334312bd6ed99 | Add simple bounded cache decorator | pykit/utils/convenience.py | pykit/utils/convenience.py | # -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
try:
import __builtin__ as builtins
except ImportError:
import builtins
import string
import functools
import collections
from itertools import chain
map = lambda *args: list(builtins.map(*args))
invert = lambda d: dict((v, k) for k, v in d.items())
def nestedmap(f, args):
"""
Map `f` over `args`, which contains elements or nested lists
"""
result = []
for arg in args:
if isinstance(arg, list):
result.append(list(map(f, arg)))
else:
result.append(f(arg))
return result
def flatten(args):
"""Flatten nested lists (return as iterator)"""
for arg in args:
if isinstance(arg, list):
for x in arg:
yield x
else:
yield arg
def mutable_flatten(args):
"""Flatten nested lists (return as iterator)"""
for arg in args:
if isinstance(arg, list):
for x in arg:
yield x
else:
yield arg
def mergedicts(*dicts):
"""Merge all dicts into a new dict"""
return dict(chain(*[d.items() for d in dicts]))
def listify(f):
"""Decorator to turn generator results into lists"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
return list(f(*args, **kwargs))
return wrapper
@listify
def prefix(iterable, prefix):
"""Prefix each item from the iterable with a prefix"""
for item in iterable:
yield prefix + item
# ______________________________________________________________________
# Strings
def substitute(s, **substitutions):
"""Use string.Template to substitute placeholders in a string"""
return string.Template(s).substitute(**substitutions)
# ______________________________________________________________________
def hashable(x):
try:
hash(x)
except TypeError:
return False
else:
return True
# ______________________________________________________________________
class ValueDict(object):
"""
Use dict values as attributes.
"""
def __init__(self, d):
self.__getattr__ = d.__getitem__
self.__setattr__ = d.__setitem__
self.__detattr__ = d.__detitem__
# ______________________________________________________________________
def call_once(f):
"""Cache the result of the function, so that it's called only once"""
result = []
def wrapper(*args, **kwargs):
if len(result) == 0:
ret = f(*args, **kwargs)
result.append(ret)
return result[0]
return wrapper
def cached(limit=1000):
"""Cache the result for the arguments just once"""
def decorator(f):
cache = {}
def wrapper(*args):
if args not in cache:
if len(cache) > limit:
cache.popitem()
cache[args] = f(*args)
return cache[args]
return wrapper
return decorator
# ______________________________________________________________________
def make_temper():
"""Return a function that returns temporary names"""
temps = collections.defaultdict(int)
seen = set()
def temper(input=""):
name, dot, tail = input.rpartition('.')
if tail.isdigit():
varname = name
else:
varname = input
count = temps[varname]
temps[varname] += 1
if varname and count == 0:
result = varname
else:
result = "%s.%d" % (varname, count)
assert result not in seen
seen.add(result)
return result
return temper
# ______________________________________________________________________
| # -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
try:
import __builtin__ as builtins
except ImportError:
import builtins
import string
import functools
import collections
from itertools import chain
map = lambda *args: list(builtins.map(*args))
invert = lambda d: dict((v, k) for k, v in d.items())
def nestedmap(f, args):
"""
Map `f` over `args`, which contains elements or nested lists
"""
result = []
for arg in args:
if isinstance(arg, list):
result.append(list(map(f, arg)))
else:
result.append(f(arg))
return result
def flatten(args):
"""Flatten nested lists (return as iterator)"""
for arg in args:
if isinstance(arg, list):
for x in arg:
yield x
else:
yield arg
def mutable_flatten(args):
"""Flatten nested lists (return as iterator)"""
for arg in args:
if isinstance(arg, list):
for x in arg:
yield x
else:
yield arg
def mergedicts(*dicts):
"""Merge all dicts into a new dict"""
return dict(chain(*[d.items() for d in dicts]))
def listify(f):
"""Decorator to turn generator results into lists"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
return list(f(*args, **kwargs))
return wrapper
@listify
def prefix(iterable, prefix):
"""Prefix each item from the iterable with a prefix"""
for item in iterable:
yield prefix + item
# ______________________________________________________________________
# Strings
def substitute(s, **substitutions):
"""Use string.Template to substitute placeholders in a string"""
return string.Template(s).substitute(**substitutions)
# ______________________________________________________________________
def hashable(x):
try:
hash(x)
except TypeError:
return False
else:
return True
# ______________________________________________________________________
class ValueDict(object):
"""
Use dict values as attributes.
"""
def __init__(self, d):
self.__getattr__ = d.__getitem__
self.__setattr__ = d.__setitem__
self.__detattr__ = d.__detitem__
# ______________________________________________________________________
def cached(f):
"""Cache the result of the function"""
result = []
def wrapper(*args, **kwargs):
if len(result) == 0:
ret = f(*args, **kwargs)
result.append(ret)
return result[0]
return wrapper
call_once = cached
# ______________________________________________________________________
def make_temper():
"""Return a function that returns temporary names"""
temps = collections.defaultdict(int)
seen = set()
def temper(input=""):
name, dot, tail = input.rpartition('.')
if tail.isdigit():
varname = name
else:
varname = input
count = temps[varname]
temps[varname] += 1
if varname and count == 0:
result = varname
else:
result = "%s.%d" % (varname, count)
assert result not in seen
seen.add(result)
return result
return temper
# ______________________________________________________________________
| Python | 0 |
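
A usage sketch of the bounded cache above, to be pasted after its definition: results are memoized per argument tuple (arguments must be hashable), and once `limit` entries exist an arbitrary one is evicted via `dict.popitem`:

calls = []

@cached(limit=2)
def square(x):
    calls.append(x)   # records actual computations
    return x * x

square(2); square(2); square(3)
print(calls)  # [2, 3] -- the second square(2) was served from the cache
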
a5da284b70e3e04a919679475e9cf2e276430077 | Fix "--attribution" option (doesn't need an argument) | renderchan/manager.py | renderchan/manager.py | __author__ = 'Konstantin Dmitriev'
from gettext import gettext as _
from optparse import OptionParser
import os.path
from renderchan.core import RenderChan
from renderchan.core import Attribution
from renderchan.file import RenderChanFile
from renderchan.project import RenderChanProject
def process_args():
parser = OptionParser(
usage=_("""
%prog """))
# The --freeze and --unfreeze options are temporary disabled, because this function should behave differently.
#parser.add_option("--freeze", dest="freezeList",
# action="append",
# help=_("Freeze path."))
#parser.add_option("--unfreeze", dest="unfreezeList",
# action="append",
# help=_("Un-freeze path."))
parser.add_option("--lang", dest="setLanguage",
action="store", nargs=1,
help=_("Switch project language."))
parser.add_option("--attribution", dest="getAttribution",
action="store_true",
default=False,
help=_("Get attribution information from file."))
options, args = parser.parse_args()
return options, args
def main(argv):
options, args = process_args()
# Parse frozen parameters
# The --freeze and --unfreeze options are temporary disabled, because this function should behave differently.
#if options.freezeList or options.unfreezeList:
# renderchan = RenderChan()
# if not options.freezeList:
# options.freezeList=[]
# if not options.unfreezeList:
# options.unfreezeList=[]
# frozenListChanged=False
# for filename in options.freezeList:
# filename=os.path.abspath(filename)
# if not filename in options.unfreezeList:
# taskfile = RenderChanFile(filename, renderchan.modules, renderchan.projects)
# taskfile.setFrozen(True)
# frozenListChanged=True
# for filename in options.unfreezeList:
# filename=os.path.abspath(filename)
# if not filename in options.freezeList:
# taskfile = RenderChanFile(filename, renderchan.modules, renderchan.projects)
# taskfile.setFrozen(False)
# frozenListChanged=True
# if frozenListChanged:
# taskfile.project.saveFrozenPaths()
if options.setLanguage:
project = RenderChanProject(os.getcwd())
project.switchLanguage(options.setLanguage)
if options.getAttribution:
filename=os.path.abspath(options.getAttribution)
info = Attribution(filename)
info.output() | __author__ = 'Konstantin Dmitriev'
from gettext import gettext as _
from optparse import OptionParser
import os.path
from renderchan.core import RenderChan
from renderchan.core import Attribution
from renderchan.file import RenderChanFile
from renderchan.project import RenderChanProject
def process_args():
parser = OptionParser(
usage=_("""
%prog """))
# The --freeze and --unfreeze options are temporary disabled, because this function should behave differently.
#parser.add_option("--freeze", dest="freezeList",
# action="append",
# help=_("Freeze path."))
#parser.add_option("--unfreeze", dest="unfreezeList",
# action="append",
# help=_("Un-freeze path."))
parser.add_option("--lang", dest="setLanguage",
action="store", nargs=1,
help=_("Switch project language."))
parser.add_option("--attribution", dest="getAttribution",
action="store", nargs=1,
help=_("Get attribution information from file."))
options, args = parser.parse_args()
return options, args
def main(argv):
options, args = process_args()
# Parse frozen parameters
# The --freeze and --unfreeze options are temporary disabled, because this function should behave differently.
#if options.freezeList or options.unfreezeList:
# renderchan = RenderChan()
# if not options.freezeList:
# options.freezeList=[]
# if not options.unfreezeList:
# options.unfreezeList=[]
# frozenListChanged=False
# for filename in options.freezeList:
# filename=os.path.abspath(filename)
# if not filename in options.unfreezeList:
# taskfile = RenderChanFile(filename, renderchan.modules, renderchan.projects)
# taskfile.setFrozen(True)
# frozenListChanged=True
# for filename in options.unfreezeList:
# filename=os.path.abspath(filename)
# if not filename in options.freezeList:
# taskfile = RenderChanFile(filename, renderchan.modules, renderchan.projects)
# taskfile.setFrozen(False)
# frozenListChanged=True
# if frozenListChanged:
# taskfile.project.saveFrozenPaths()
if options.setLanguage:
project = RenderChanProject(os.getcwd())
project.switchLanguage(options.setLanguage)
if options.getAttribution:
filename=os.path.abspath(options.getAttribution)
info = Attribution(filename)
info.output() | Python | 0.001461 |
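
The optparse change above is the whole fix: `action="store"` with `nargs=1` consumed the token after the flag, while `store_true` needs no argument and yields a boolean, leaving the filename as a positional argument. A minimal illustration:

from optparse import OptionParser

parser = OptionParser()
parser.add_option("--attribution", dest="getAttribution",
                  action="store_true", default=False)
opts, args = parser.parse_args(["--attribution", "file.sif"])
print(opts.getAttribution, args)  # True ['file.sif'] -- the path stays in args
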
629d8a699e0ab944d4775bd6a31709546d4ca839 | add doc string | contacts/models/misc.py | contacts/models/misc.py |
#!/usr/bin/python
#Django Imports
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from jsonfield import JSONField
#Local Imports
import transports
from utils.models import TimeStampedModel,BaseQuerySet
class Connection(models.Model):
class Meta:
app_label = 'contacts'
objects = BaseQuerySet.as_manager()
identity = models.CharField(max_length=25,primary_key=True)
contact = models.ForeignKey(settings.MESSAGING_CONTACT,blank=True,null=True)
description = models.CharField(max_length=30,blank=True,null=True,help_text='Description of phone numbers relationship to contact')
is_primary = models.BooleanField(default=False,verbose_name='Primary')
def __unicode__(self):
return "{} ({})".format(self.contact.study_id if self.contact else '',self.identity)
def send_custom(self,text,translated_text='',languages='',**kwargs):
return self.send_message(text,translation_status='cust',translated_text=translated_text,languages=languages,is_system=False,**kwargs)
def send_message(self,text,**kwargs):
# Send message over system transport
try:
msg_id, msg_success, external_data = transports.send(self.identity,text)
except transports.TransportError as e:
msg_id = ""
msg_success = False
external_data = {"error":str(e)}
# Create new message
new_message = self.message_set.create(
text=text,
connection=self,
external_id=msg_id,
external_success=msg_success,
external_status="Sent" if msg_success else external_data.get("status","Failed"),
external_data=external_data,
**kwargs)
return new_message
class PractitionerQuerySet(BaseQuerySet):
def for_participant(self,participant):
return self.filter(facility=participant.facility).exclude(user__first_name='').select_related('user').first()
class Practitioner(models.Model):
'''
User profile for nurse practitioners to link a User profile to a Facility
'''
class Meta:
app_label = 'contacts'
objects = PractitionerQuerySet.as_manager()
user = models.OneToOneField(User)
facility = models.CharField(max_length=15,choices=settings.FACILITY_CHOICES)
password_changed = models.BooleanField(default=False)
@property
def username(self):
return self.user.username
def __str__(self):
return '{0}'.format(self.user.username)
def __repr__(self):
return '<{0!s}> <{1}>'.format(self.facility,self.user.username)
class EventLog(TimeStampedModel):
"""
The basic idea behind this model is to keep track of which staff accounts take which actions.
These are currently created in the "visit seen" and "attended DRF" end points, however
there is not currently any logic that accesses / uses the data anywhere in the codebase.
"""
class Meta:
app_label = 'contacts'
objects = BaseQuerySet.as_manager()
user = models.ForeignKey(User)
event = models.CharField(max_length=25,help_text="Event Name")
data = JSONField()
|
#!/usr/bin/python
#Django Imports
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from jsonfield import JSONField
#Local Imports
import transports
from utils.models import TimeStampedModel,BaseQuerySet
class Connection(models.Model):
class Meta:
app_label = 'contacts'
objects = BaseQuerySet.as_manager()
identity = models.CharField(max_length=25,primary_key=True)
contact = models.ForeignKey(settings.MESSAGING_CONTACT,blank=True,null=True)
description = models.CharField(max_length=30,blank=True,null=True,help_text='Description of phone numbers relationship to contact')
is_primary = models.BooleanField(default=False,verbose_name='Primary')
def __unicode__(self):
return "{} ({})".format(self.contact.study_id if self.contact else '',self.identity)
def send_custom(self,text,translated_text='',languages='',**kwargs):
return self.send_message(text,translation_status='cust',translated_text=translated_text,languages=languages,is_system=False,**kwargs)
def send_message(self,text,**kwargs):
# Send message over system transport
try:
msg_id, msg_success, external_data = transports.send(self.identity,text)
except transports.TransportError as e:
msg_id = ""
msg_success = False
external_data = {"error":str(e)}
# Create new message
new_message = self.message_set.create(
text=text,
connection=self,
external_id=msg_id,
external_success=msg_success,
external_status="Sent" if msg_success else external_data.get("status","Failed"),
external_data=external_data,
**kwargs)
return new_message
class PractitionerQuerySet(BaseQuerySet):
def for_participant(self,participant):
return self.filter(facility=participant.facility).exclude(user__first_name='').select_related('user').first()
class Practitioner(models.Model):
'''
User profile for nurse practitioners to link a User profile to a Facility
'''
class Meta:
app_label = 'contacts'
objects = PractitionerQuerySet.as_manager()
user = models.OneToOneField(User)
facility = models.CharField(max_length=15,choices=settings.FACILITY_CHOICES)
password_changed = models.BooleanField(default=False)
@property
def username(self):
return self.user.username
def __str__(self):
return '{0}'.format(self.user.username)
def __repr__(self):
return '<{0!s}> <{1}>'.format(self.facility,self.user.username)
class EventLog(TimeStampedModel):
class Meta:
app_label = 'contacts'
objects = BaseQuerySet.as_manager()
user = models.ForeignKey(User)
event = models.CharField(max_length=25,help_text="Event Name")
data = JSONField()
| Python | 0.000002 |
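Per the docstring this commit adds, EventLog rows track which staff account took which action, written from the "visit seen" and "attended DRF" endpoints. A hedged sketch of such a call site — the import path follows the record's file location, but the event name, payload, and view signature are illustrative:

```python
# Hypothetical call site -- the real endpoints live elsewhere in the codebase.
from contacts.models.misc import EventLog  # assumed module path

def mark_visit_seen(request, visit):
    EventLog.objects.create(
        user=request.user,            # staff account taking the action
        event='visit_seen',           # fits the CharField(max_length=25)
        data={'visit_id': visit.pk},  # arbitrary JSON payload
    )
```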
6535755cfdc914efc5e1efc6a89ed9dca7c78b87 | Correct docstrings of result_suite/sample.py | checker/result_suite/sample.py | checker/result_suite/sample.py | from checker.base import BakeryTestCase as TestCase
class SampleTest(TestCase):
target = 'result'
path = '.'
def setUp(self):
# read ttf
# self.font = fontforge.open(self.path)
pass
def test_ok(self):
""" This test succeeds """
self.assertTrue(True)
def test_failure(self):
""" This test fails """
self.assertTrue(False)
def test_error(self):
""" Unexpected error """
1 / 0
self.assertTrue(False)
| from checker.base import BakeryTestCase as TestCase
class SampleTest(TestCase):
target = 'result'
path = '.'
def setUp(self):
# read ttf
# self.font = fontforge.open(self.path)
pass
def test_ok(self):
""" This test failed """
self.assertTrue(True)
def test_failure(self):
""" This test failed """
self.assertTrue(False)
def test_error(self):
""" Unexpected error """
1 / 0
self.assertTrue(False)
| Python | 0.000002 |
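These docstring corrections matter because unittest-style runners surface a test's first docstring line as its description via shortDescription(). BakeryTestCase's own API isn't shown in the record, so this self-contained illustration uses plain unittest.TestCase:

```python
import unittest

class SampleTest(unittest.TestCase):
    def test_ok(self):
        """ This test succeeds """
        self.assertTrue(True)

test = SampleTest('test_ok')
print(test.shortDescription())  # -> This test succeeds
```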
0df292fbb34a66ee66fce919ea63b68a5f9eff1a | Set up data structures for parsing projects | app/data.py | app/data.py | import json
import os
from typing import Dict, List
from app.util import cached_function
class Projects():
def __init__(self) -> None:
self.languages: List[Language] = []
@staticmethod
def load_from_file() -> 'Projects':
current_directory = os.path.dirname(os.path.realpath(__file__))
path = os.path.join(current_directory, 'data', 'projects.json')
with open(path, 'r') as handle:
data = handle.read()
parsed_data = json.loads(data)
return Projects.load(parsed_data)
@staticmethod
def load(data: Dict[str, Dict[str, Dict[str, str]]]) -> 'Projects':
projects = Projects()
for key, value in data.items():
language = Language.load(key, value)
projects.languages.append(language)
return projects
class Language():
def __init__(self) -> None:
self.name: str = ''
self.projects: List[Project] = []
@staticmethod
def load(key: str, data: Dict[str, Dict[str, str]]) -> 'Language':
language = Language()
language.name = key
for key, value in data.items():
project = Project.load(key, value)
language.projects.append(project)
return language
class Project():
def __init__(self) -> None:
self.name: str = ''
self.description: str = ''
self.github: str = ''
self.rubygems: str = ''
self.pypi: str = ''
self.npm: str = ''
self.web: str = ''
@staticmethod
def load(key: str, data: Dict[str, str]) -> 'Project':
project = Project()
project.name = key
project.description = data.get('description', '')
project.github = data.get('github', '')
project.rubygems = data.get('rubygems', '')
project.pypi = data.get('pypi', '')
project.npm = data.get('npm', '')
project.web = data.get('web', '')
return project
def links(self) -> Dict[str, str]:
links: Dict[str, str] = {
'github': self.github,
'rubygems': self.rubygems,
'pypi': self.pypi,
'npm': self.npm,
'web': self.web,
}
links = dict([(k, v) for k, v in links.items() if v])
return links
@cached_function
def get_projects() -> Projects:
loaded_projects = Projects.load_from_file()
return loaded_projects
class Shelf():
def __init__(self) -> None:
self.data: Dict[str, List[Dict[str, str]]]
@staticmethod
def load() -> 'Shelf':
current_directory = os.path.dirname(os.path.realpath(__file__))
path = os.path.join(current_directory, 'data', 'shelf.json')
with open(path, 'r') as handle:
shelf_data = handle.read()
shelf = Shelf()
shelf.data = json.loads(shelf_data)
return shelf
@cached_function
def get_shelf() -> Shelf:
loaded_shelf = Shelf.load()
return loaded_shelf
| import json
import os
from typing import Dict, List
from app.util import cached_function
class Projects():
def __init__(self) -> None:
self.data: Dict[str, Dict[str, Dict[str, str]]] = {}
@staticmethod
def load() -> 'Projects':
current_directory = os.path.dirname(os.path.realpath(__file__))
path = os.path.join(current_directory, 'data', 'projects.json')
with open(path, 'r') as handle:
project_data = handle.read()
projects = Projects()
projects.data = json.loads(project_data)
return projects
@cached_function
def get_projects() -> Projects:
loaded_projects = Projects.load()
return loaded_projects
class Shelf():
def __init__(self) -> None:
self.data: Dict[str, List[Dict[str, str]]]
@staticmethod
def load() -> 'Shelf':
current_directory = os.path.dirname(os.path.realpath(__file__))
path = os.path.join(current_directory, 'data', 'shelf.json')
with open(path, 'r') as handle:
shelf_data = handle.read()
shelf = Shelf()
shelf.data = json.loads(shelf_data)
return shelf
@cached_function
def get_shelf() -> Shelf:
loaded_shelf = Shelf.load()
return loaded_shelf
| Python | 0.000003 |
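A short usage sketch for the new classes, feeding Projects.load an inline dict in the language -> project -> attributes shape it expects. The sample project is invented, and the app.data import assumes the record's file path:

```python
from app.data import Projects  # assumed package layout from the record

sample = {
    'Python': {
        'examplelib': {  # hypothetical project entry
            'description': 'An example library',
            'github': 'https://github.com/example/examplelib',
            'pypi': 'https://pypi.org/project/examplelib/',
        },
    },
}

projects = Projects.load(sample)
for language in projects.languages:
    for project in language.projects:
        # links() drops the empty entries (rubygems, npm, web here)
        print(language.name, project.name, project.links())
```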
75a93ae0e55e240a5f8595c0d58d15b1d846948a | Add support for spectate after starting the game | chillin_server/gui/protocol.py | chillin_server/gui/protocol.py | # -*- coding: utf-8 -*-
# python imports
from threading import Thread, Lock, Event
import sys
if sys.version_info > (3,):
from queue import Queue
else:
from Queue import Queue
# project imports
from ..config import Config
from .network import Network
from .parser import Parser
from .messages import Auth
class Protocol:
def __init__(self, authenticate_func, game_info):
self._auth_func = authenticate_func
self._game_info = game_info
self._all_messages_data = []
self._network = Network()
self._parser = Parser()
self._clients = set()
self._lock = Lock()
self._running = Event()
self.send_queue = Queue()
def _add_client(self, sock):
self._send_msg(sock, self._game_info)
i = 0
while i < len(self._all_messages_data):
if not self._send_data(sock, self._all_messages_data[i]):
self._network.close(sock)
return
i += 1
# TODO: some messages may not get delivered here ...
self._lock.acquire()
self._clients.add(sock)
self._lock.release()
def _remove_clients(self, socks):
for sock in socks:
self._network.close(sock)
self._lock.acquire()
self._clients.difference_update(socks)
self._lock.release()
def _can_join(self, sock):
if len(self._clients) >= Config.config['gui'].get('max_spectators', 5):
return False
return True
def _accept(self):
def init(sock):
authenticated = False
if Config.config['general']['offline_mode']:
authenticated = True
else:
token = self._network.recv_data(sock)
if token and self._auth_func(token):
authenticated = True
self._send_msg(sock, Auth(authenticated=True))
else:
self._send_msg(sock, Auth(authenticated=False))
self._network.close(sock)
if authenticated:
self._add_client(sock)
while self._running.is_set():
sock = self._network.accept()
if not self._can_join(sock):
self._network.close(sock)
continue
if sock and self._running.is_set():
t = Thread(target=init, args=(sock,))
t.setDaemon(True)
t.start()
def _send_data(self, sock, data):
return self._network.send_data(sock, data)
def _send_msg(self, sock, msg):
data = self._parser.encode(msg)
return self._send_data(sock, data)
def _broadcast_msg(self, msg):
data = self._parser.encode(msg)
self._all_messages_data.append(data)
disconnected_clients = []
for sock in self._clients:
if not self._send_data(sock, data):
disconnected_clients.append(sock)
self._remove_clients(disconnected_clients)
def _send_thread(self):
while self._running.is_set():
msg = self.send_queue.get()
if msg:
self._broadcast_msg(msg)
def start(self):
self._network.start()
self._running.set()
t = Thread(target=self._accept)
t.setDaemon(True)
t.start()
t = Thread(target=self._send_thread)
t.setDaemon(True)
t.start()
def stop(self):
for sock in self._clients:
self._network.close(sock)
self._running.clear()
self.send_queue.put(None)
self._network.stop()
| # -*- coding: utf-8 -*-
# python imports
from threading import Thread, Lock, Event
import sys
if sys.version_info > (3,):
from queue import Queue
else:
from Queue import Queue
# project imports
from ..config import Config
from .network import Network
from .parser import Parser
from .messages import Auth
class Protocol:
def __init__(self, authenticate_func, game_info):
self._auth_func = authenticate_func
self._game_info = game_info
self._network = Network()
self._parser = Parser()
self._clients = set()
self._lock = Lock()
self._running = Event()
self.send_queue = Queue()
def _add_client(self, sock):
self._send_msg(sock, self._game_info)
self._lock.acquire()
self._clients.add(sock)
self._lock.release()
def _remove_clients(self, socks):
for sock in socks:
self._network.close(sock)
self._lock.acquire()
self._clients.difference_update(socks)
self._lock.release()
def _accept(self):
def auth(sock):
token = self._network.recv_data(sock)
if token and self._auth_func(token):
self._send_msg(sock, Auth(authenticated=True))
self._add_client(sock)
else:
self._send_msg(sock, Auth(authenticated=False))
self._network.close(sock)
while self._running.is_set():
sock = self._network.accept()
if sock and self._running.is_set():
if Config.config['general']['offline_mode']:
self._add_client(sock)
else:
Thread(target=auth, args=(sock,)).start()
def _send_msg(self, sock, msg):
data = self._parser.encode(msg)
self._network.send_data(sock, data)
def _broadcast_msg(self, msg):
data = self._parser.encode(msg)
disconnected_clients = []
for sock in self._clients:
if not self._network.send_data(sock, data):
disconnected_clients.append(sock)
self._remove_clients(disconnected_clients)
def _send_thread(self):
while self._running.is_set():
msg = self.send_queue.get()
if msg:
self._broadcast_msg(msg)
def start(self):
self._network.start()
self._running.set()
t = Thread(target=self._accept)
t.setDaemon(True)
t.start()
t = Thread(target=self._send_thread)
t.setDaemon(True)
t.start()
def stop(self):
for sock in self._clients:
self._network.close(sock)
self._running.clear()
self.send_queue.put(None)
self._network.stop()
| Python | 0 |
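The spectate-after-start support hinges on _broadcast_msg appending every encoded message to _all_messages_data and _add_client replaying that buffer to a late joiner before adding it to the live client set. A stripped-down toy model of that buffer-and-replay pattern — deliberately not the real Network/Parser API:

```python
# Toy illustration of replay-on-join; "sockets" are modelled as plain lists.
class ReplayBroadcaster:
    def __init__(self):
        self.history = []  # every message ever broadcast, in order
        self.clients = []

    def add_client(self, client):
        # A late joiner first receives the full history ...
        for data in self.history:
            client.append(data)
        # ... then joins the live broadcast set.
        self.clients.append(client)

    def broadcast(self, data):
        self.history.append(data)
        for client in self.clients:
            client.append(data)

hub = ReplayBroadcaster()
hub.broadcast('game_info')
hub.broadcast('turn 1')
spectator = []
hub.add_client(spectator)   # joins after the game has started
hub.broadcast('turn 2')
print(spectator)            # ['game_info', 'turn 1', 'turn 2']
```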