commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos |
---|---|---|---|---|---|---|---|---|---|
ba378a1c151017ad6bd3ab6351fdbf534c731229 | vanth/main.py | vanth/main.py | import logging
import os
import chryso.connection
import sepiida.config
import sepiida.log
import vanth.config
import vanth.server
import vanth.tables
LOGGER = logging.getLogger(__name__)
def create_db_connection(config):
engine = chryso.connection.Engine(config.db, vanth.tables)
chryso.connection.store(engine)
return engine
def create_application(config):
create_db_connection(config)
LOGGER.info("Starting up vanth version %s", vanth.version.VERSION)
application = vanth.server.create_app(config)
logging.getLogger('vanth.cors').setLevel(logging.WARNING)
return application
def setup_logging():
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig()
sepiida.log.setup_logging()
def get_config():
return sepiida.config.load('/etc/vanth.yaml', vanth.config.SPECIFICATION)
def main():
setup_logging()
config = get_config()
application = create_application(config)
try:
host = os.getenv('HOST', 'localhost')
port = int(os.getenv('PORT', 4545))
application.run(host, port)
except KeyboardInterrupt:
LOGGER.info('Shutting down')
| import logging
import os
import chryso.connection
import sepiida.config
import sepiida.log
import vanth.config
import vanth.server
import vanth.tables
LOGGER = logging.getLogger(__name__)
def create_db_connection(config):
engine = chryso.connection.Engine(config.db, vanth.tables)
chryso.connection.store(engine)
return engine
def create_application(config):
create_db_connection(config)
LOGGER.info("Starting up vanth version %s", vanth.version.VERSION)
application = vanth.server.create_app(config)
logging.getLogger('vanth.cors').setLevel(logging.WARNING)
return application
def setup_logging():
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig()
sepiida.log.setup_logging()
logging.getLogger('vanth.sgml').setLevel(logging.INFO)
def get_config():
return sepiida.config.load('/etc/vanth.yaml', vanth.config.SPECIFICATION)
def main():
setup_logging()
config = get_config()
application = create_application(config)
try:
host = os.getenv('HOST', 'localhost')
port = int(os.getenv('PORT', 4545))
application.run(host, port)
except KeyboardInterrupt:
LOGGER.info('Shutting down')
| Make SGML less chatty in normal operation | Make SGML less chatty in normal operation
| Python | agpl-3.0 | EliRibble/vanth,EliRibble/vanth,EliRibble/vanth,EliRibble/vanth |
3c12a453a9686e998662fea822f85fb307f1d746 | emma2/msm/flux/__init__.py | emma2/msm/flux/__init__.py | from .api import *
| r"""
====================================================================
flux - Reactive flux and transition pathways (:mod:`emma2.msm.flux`)
====================================================================
.. currentmodule:: emma2.msm.flux
This module contains functions to compute reactive flux networks and
find dominant reaction pathways in such networks.
TPT-object
==========
.. autosummary::
:toctree: generated/
tpt - ReactiveFlux object
Reactive flux
=============
.. autosummary::
:toctree: generated/
flux_matrix - TPT flux network
to_netflux - Netflux from gross flux
flux_production - Net flux-production for all states
flux_producers
flux_consumers
coarsegrain
Reaction rates and fluxes
=========================
.. autosummary::
:toctree: generated/
total_flux
rate
mfpt
Pathway decomposition
=====================
.. autosummary::
:toctree: generated/
pathways
"""
from .api import *
| Include flux package in doc | [msm/flux] Include flux package in doc
| Python | bsd-2-clause | arokem/PyEMMA,trendelkampschroer/PyEMMA,trendelkampschroer/PyEMMA,arokem/PyEMMA |
79bb94f51cd2dca65479cb39f6c365c4c372b0ca | forumuser/models.py | forumuser/models.py | from django.contrib.auth.models import AbstractUser, Group
from django.db import models
class ForumUser(AbstractUser):
def __unicode__(self):
return '%(username)s (%(email)s)' % {
'username': self.username,
'email': self.email
}
| from django.contrib.auth.models import AbstractUser, Group
from django.db import models
class ForumUser(AbstractUser):
items_per_page = models.PositiveSmallIntegerField(blank=True, null=True)
def __unicode__(self):
return '%(username)s (%(email)s)' % {
'username': self.username,
'email': self.email
}
| Add items per page as a preference to the forum user model | Add items per page as a preference to the forum user model
| Python | mit | hellsgate1001/thatforum_django,hellsgate1001/thatforum_django,hellsgate1001/thatforum_django |
e77c5acd4fcdb16f17245122212458baf5195064 | bookworm/settings_mobile.py | bookworm/settings_mobile.py | from settings import *
import settings
TEMPLATE_DIRS_BASE = TEMPLATE_DIRS
TEMPLATE_DIRS = (
'%s/library/templates/mobile/auth' % ROOT_PATH,
'%s/library/templates/mobile' % ROOT_PATH,
)
TEMPLATE_DIRS += TEMPLATE_DIRS_BASE
MOBILE = True
| from settings import *
import settings
TEMPLATE_DIRS_BASE = TEMPLATE_DIRS
TEMPLATE_DIRS = (
'%s/library/templates/mobile/auth' % ROOT_PATH,
'%s/library/templates/mobile' % ROOT_PATH,
)
TEMPLATE_DIRS += TEMPLATE_DIRS_BASE
MOBILE = True
SESSION_COOKIE_NAME = 'bookworm_mobile'
| Change cookie name for mobile setting | Change cookie name for mobile setting | Python | bsd-3-clause | google-code-export/threepress,anselmorenato/threepress,anselmorenato/threepress,anselmorenato/threepress,google-code-export/threepress,google-code-export/threepress,lizadaly/threepress,anselmorenato/threepress,lizadaly/threepress,google-code-export/threepress,lizadaly/threepress,lizadaly/threepress |
d3caf69dfe98aa2fd0f9046c01035cdd7e4e359e | opps/articles/tests/models.py | opps/articles/tests/models.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase
from opps.articles.models import Article, Post
class ArticleModelTest(TestCase):
fixtures = ['tests/initial_data.json']
def setUp(self):
self.article = Article.objects.get(id=1)
def test_child_class(self):
self.assertTrue(self.article.child_class)
self.assertEqual(self.article.child_class, 'Post')
def test_get_absolute_url(self):
self.assertEqual(self.article.get_absolute_url(),
u'/channel-01/test-post-application')
self.assertEqual(self.article.get_absolute_url(),
"/{0}/{1}".format(self.article.channel.long_slug,
self.article.slug))
class PostModelTest(TestCase):
fixtures = ['tests/initial_data.json']
def setUp(self):
self.post = Post.objects.get(id=1)
def test_basic_post_exist(self):
post = Post.objects.all()
self.assertTrue(post)
self.assertTrue(post[0], self.post)
self.assertEqual(len(post), 1)
self.assertEqual(post[0].slug, u'test-post-application')
self.assertEqual(post[0].title, u'test post application')
self.assertTrue(post[0].short_url)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase
from opps.articles.models import Article, Post
class ArticleModelTest(TestCase):
fixtures = ['tests/initial_data.json']
def setUp(self):
self.article = Article.objects.get(id=1)
def test_child_class(self):
self.assertTrue(self.article.child_class)
self.assertEqual(self.article.child_class, 'Post')
def test_get_absolute_url(self):
self.assertEqual(self.article.get_absolute_url(),
u'/channel-01/test-post-application')
self.assertEqual(self.article.get_absolute_url(),
"/{0}/{1}".format(self.article.channel.long_slug,
self.article.slug))
def test_recommendation(self):
self.assertEqual([], self.article.recommendation())
class PostModelTest(TestCase):
fixtures = ['tests/initial_data.json']
def setUp(self):
self.post = Post.objects.get(id=1)
def test_basic_post_exist(self):
post = Post.objects.all()
self.assertTrue(post)
self.assertTrue(post[0], self.post)
self.assertEqual(len(post), 1)
self.assertEqual(post[0].slug, u'test-post-application')
self.assertEqual(post[0].title, u'test post application')
self.assertTrue(post[0].short_url)
| Test recommendation via article class | Test recommendation via article class
| Python | mit | williamroot/opps,jeanmask/opps,opps/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,opps/opps |
a86eaffa53a18389ea628f37c76900cc24c701f6 | opps/contrib/logging/admin.py | opps/contrib/logging/admin.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Logging
class LoggingAdmin(admin.ModelAdmin):
model = Logging
raw_id_fields = ('user',)
exclude = ('site_iid', 'site_domain')
admin.site.register(Logging, LoggingAdmin)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Logging
class LoggingAdmin(admin.ModelAdmin):
model = Logging
raw_id_fields = ('user',)
exclude = ('site_iid', 'site_domain', 'mirror_site')
admin.site.register(Logging, LoggingAdmin)
| Add field mirror_site at exclude on LoggingAdmin | Add field mirror_site at exclude on LoggingAdmin
| Python | mit | YACOWS/opps,williamroot/opps,YACOWS/opps,williamroot/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps,opps/opps,williamroot/opps,opps/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps |
014c726b258a5cb6a147e9919ea6122fd513f8cd | items/admin.py | items/admin.py | from django.contrib import admin
from .models import Item
from tasks.models import Task
class TaskInline(admin.TabularInline):
model = Task
class ItemAdmin(admin.ModelAdmin):
inlines = [TaskInline,]
list_display = ['title', 'project', 'marker', 'progress', 'ready', 'blocked']
list_editable = ['title', 'marker', 'progress', 'ready', 'blocked']
list_filter = ['project',]
admin.site.register(Item, ItemAdmin)
| from django.contrib import admin
from .models import Item
from tasks.models import Task
class TaskInline(admin.TabularInline):
model = Task
class ItemAdmin(admin.ModelAdmin):
inlines = [TaskInline,]
list_display = ['title', 'project', 'marker', 'progress', 'ready', 'blocked']
list_editable = ['marker', 'progress', 'ready', 'blocked']
list_filter = ['project',]
admin.site.register(Item, ItemAdmin)
| Revert "Adds item title to list editable" | Revert "Adds item title to list editable"
This reverts commit e69869d4945cd72245e9243b3ec3131145ba27c2.
| Python | mit | dhiana/arpostits_api |
714fd7d0c173672f636e8d051b24046b10d3f481 | format_json.py | format_json.py | #! /usr/bin/env python
import sys
import json
for filepath in sys.argv[1:]:
with open(filepath) as f:
try:
oyster = json.load(f)
except ValueError:
sys.stderr.write("In file: {}\n".format(filepath))
raise
with open(filepath, 'w') as f:
json.dump(oyster, f, indent=4, separators=(',', ': '), sort_keys=True)
f.write('\n') # add a trailing newline.
| #! /usr/bin/env python3
import sys
import json
for filepath in sys.argv[1:]:
with open(filepath) as f:
try:
oyster = json.load(f)
except ValueError:
sys.stderr.write("In file: {}\n".format(filepath))
raise
with open(filepath, 'w') as f:
json.dump(oyster, f, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
f.write('\n') # add a trailing newline.
| Make this work for non-ASCII chars as well. | Make this work for non-ASCII chars as well.
| Python | mit | nbeaver/cmd-oysters,nbeaver/cmd-oysters |
4de23cffa16c71e287efba7d32ba375feeb9bc13 | format_json.py | format_json.py | #! /usr/bin/env python3
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
| #! /usr/bin/env python3
import sys
import json
import argparse
def format_json(fp):
try:
data = json.load(fp)
except ValueError:
sys.stderr.write("In file: {}\n".format(fp.name))
raise
# Jump back to the beginning of the file before overwriting it.
fp.seek(0)
fp.truncate(0)
json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
fp.write('\n') # add a trailing newline.
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Format JSON files in place.'
)
parser.add_argument(
'files',
type=argparse.FileType('r+'),
help='JSON filepaths',
nargs='+'
)
args = parser.parse_args()
for json_file in args.files:
format_json(json_file)
| Truncate the file before writing more data. | Truncate the file before writing more data.
| Python | mit | nbeaver/cmd-oysters,nbeaver/cmd-oysters |
860cea2b6d183414d794eb2e2d44beb7728e2d4b | hasjob/models/location.py | hasjob/models/location.py | # -*- coding: utf-8 -*-
from . import db, BaseScopedNameMixin
from flask import url_for
from .board import Board
__all__ = ['Location']
class Location(BaseScopedNameMixin, db.Model):
"""
A location where jobs are listed, using geonameid for primary key. Scoped to a board
"""
__tablename__ = 'location'
id = db.Column(db.Integer, primary_key=True, autoincrement=False)
geonameid = db.synonym('id')
board_id = db.Column(None, db.ForeignKey('board.id'), nullable=False, primary_key=True, index=True)
parent = db.synonym('board_id')
board = db.relationship(Board, backref=db.backref('locations', lazy='dynamic', cascade='all, delete-orphan'))
#: Landing page description
description = db.Column(db.UnicodeText, nullable=True)
__table_args__ = (db.UniqueConstraint('board_id', 'name'),)
def url_for(self, action='view', **kwargs):
subdomain = self.board.name if self.board.not_root else None
if action == 'view':
return url_for('browse_by_location', location=self.name, subdomain=subdomain, **kwargs)
elif action == 'edit':
return url_for('location_edit', name=self.name, subdomain=subdomain, **kwargs)
@classmethod
def get(cls, name, board):
return cls.query.filter_by(name=name, board=board).one_or_none()
| # -*- coding: utf-8 -*-
from . import db, BaseScopedNameMixin
from flask import url_for
from .board import Board
__all__ = ['Location']
class Location(BaseScopedNameMixin, db.Model):
"""
A location where jobs are listed, using geonameid for primary key. Scoped to a board
"""
__tablename__ = 'location'
id = db.Column(db.Integer, primary_key=True, autoincrement=False)
geonameid = db.synonym('id')
board_id = db.Column(None, db.ForeignKey('board.id'), nullable=False, primary_key=True, index=True)
board = db.relationship(Board, backref=db.backref('locations', lazy='dynamic', cascade='all, delete-orphan'))
parent = db.synonym('board')
#: Landing page description
description = db.Column(db.UnicodeText, nullable=True)
__table_args__ = (db.UniqueConstraint('board_id', 'name'),)
def url_for(self, action='view', **kwargs):
subdomain = self.board.name if self.board.not_root else None
if action == 'view':
return url_for('browse_by_location', location=self.name, subdomain=subdomain, **kwargs)
elif action == 'edit':
return url_for('location_edit', name=self.name, subdomain=subdomain, **kwargs)
@classmethod
def get(cls, name, board):
return cls.query.filter_by(name=name, board=board).one_or_none()
| Fix parent synonym for Location model | Fix parent synonym for Location model
| Python | agpl-3.0 | hasgeek/hasjob,hasgeek/hasjob,hasgeek/hasjob,hasgeek/hasjob |
401f98ad74792e9a5d9354dec8c24dc9637d1f5e | tests/gsim/pezeshk_2011_test.py | tests/gsim/pezeshk_2011_test.py | # The Hazard Library
# Copyright (C) 2013 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openquake.hazardlib.gsim.pezeshk_2011 import Pezeshk2011
from tests.gsim.utils import BaseGSIMTestCase
class Pezeshk2011TestCase(BaseGSIMTestCase):
GSIM_CLASS = Pezeshk2011
# Test data were obtained from a tool given by the authors
# The data of the values of the mean PGA and SA are in g's.
def test_mean(self):
self.check('PEZE11/PZ11_MEAN.csv',
max_discrep_percentage=0.5)
def test_std_total(self):
self.check('PEZE11/PZ11_STD_TOTAL.csv',
max_discrep_percentage=0.5)
| # The Hazard Library
# Copyright (C) 2013 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openquake.hazardlib.gsim.pezeshk_2011 import PezeshkEtAl2011
from tests.gsim.utils import BaseGSIMTestCase
class Pezeshk2011EtAlTestCase(BaseGSIMTestCase):
GSIM_CLASS = PezeshkEtAl2011
# Test data were obtained from a tool given by the authors
# The data of the values of the mean PGA and SA are in g's.
def test_mean(self):
self.check('PEZE11/PZ11_MEAN.csv',
max_discrep_percentage=0.5)
def test_std_total(self):
self.check('PEZE11/PZ11_STD_TOTAL.csv',
max_discrep_percentage=0.5)
| Add implementation of gmpe Pezeshk et al 2011 for ENA | Add implementation of gmpe Pezeshk et al 2011 for ENA
| Python | agpl-3.0 | vup1120/oq-hazardlib,gem/oq-engine,g-weatherill/oq-hazardlib,gem/oq-hazardlib,gem/oq-hazardlib,g-weatherill/oq-hazardlib,gem/oq-engine,gem/oq-engine,rcgee/oq-hazardlib,mmpagani/oq-hazardlib,g-weatherill/oq-hazardlib,gem/oq-hazardlib,ROB-Seismology/oq-hazardlib,silviacanessa/oq-hazardlib,vup1120/oq-hazardlib,ROB-Seismology/oq-hazardlib,silviacanessa/oq-hazardlib,larsbutler/oq-hazardlib,ROB-Seismology/oq-hazardlib,silviacanessa/oq-hazardlib,larsbutler/oq-hazardlib,silviacanessa/oq-hazardlib,gem/oq-engine,larsbutler/oq-hazardlib,g-weatherill/oq-hazardlib,rcgee/oq-hazardlib,vup1120/oq-hazardlib,ROB-Seismology/oq-hazardlib,gem/oq-engine,mmpagani/oq-hazardlib,mmpagani/oq-hazardlib |
873a383d8285a94ae1c8c61b515ca96f96e41b0c | migrations/versions/1340_set_suppliers_active_flag_not_nullable.py | migrations/versions/1340_set_suppliers_active_flag_not_nullable.py | """set suppliers active flag NOT NULLABLE
Ensure that all suppliers are either active or inactive.
Revision ID: 1340
Revises: 1330
Create Date: 2019-06-26 11:53:56.085586
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1340'
down_revision = '1330'
def upgrade():
# We want this column to be NOT NULLABLE, so we need to set any NULL
# values. NULLs are active suppliers (i.e. they have not been made
# inactive).
op.execute("UPDATE suppliers SET active = true WHERE active = NULL")
op.alter_column('suppliers', 'active', nullable=False)
def downgrade():
op.alter_column('suppliers', 'active', nullable=True)
| """set suppliers active flag NOT NULLABLE
Ensure that all suppliers are either active or inactive.
Revision ID: 1340
Revises: 1330
Create Date: 2019-06-26 11:53:56.085586
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1340'
down_revision = '1330'
def upgrade():
# We want this column to be NOT NULLABLE, so we need to set any NULL
# values. NULLs are active suppliers (i.e. they have not been made
# inactive).
op.execute("UPDATE suppliers SET active = true WHERE active is NULL")
op.alter_column('suppliers', 'active', nullable=False)
def downgrade():
op.alter_column('suppliers', 'active', nullable=True)
| Fix comparison with NULL bug | Fix comparison with NULL bug
| Python | mit | alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api |
87771bda7fbf46519097ba433a7b4fd3f2cbaa7e | office_lunch_order/office_lunch_order_app/tests.py | office_lunch_order/office_lunch_order_app/tests.py | from django.test import TestCase, Client
c = Client()
response = c.get('/officelunchorder/')
response.status_code # 200
response.content
response = c.post('/officelunchorder/login/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/logout/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/orders/')
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
print(response.status_code) # 302 found | from django.test import TestCase, Client
c = Client()
response = c.get('/officelunchorder/')
response.status_code # 200
response.content
response = c.post('/officelunchorder/login/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/logout/')
response.status_code # 200
response.content
response = c.get('/officelunchorder/orders/')
response.status_code # 302 found
response = c.get('/officelunchorder/new_order/')
response.status_code # 302 found
response = c.get('/officelunchorder/add_order/25/') # existing order_id
response.status_code # 302 found
response = c.get('/officelunchorder/order_details/25/') # existing order_id
response.status_code # 302 found
response = c.post('/officelunchorder/close_order/')
response.status_code # 302 found
| Test add_order and order details with existing order_id url | Test add_order and order details with existing order_id url
| Python | epl-1.0 | MariuszKorotko/Office_Lunch_Order,MariuszKorotko/Office_Lunch_Order |
4c60e42af4b37c260e2a9f00eb82dbd44ee53799 | __init__.py | __init__.py | # imports for Pyrge package
__all__ = ['effects',
'emitter',
'entity',
'gameloop',
'mixin',
'music',
'point',
'quadtree',
'sound',
'spritesheet',
'text',
'tiledimage',
'tilemap',
'tween',
'tweenfunc',
'util',
'world',
'Game', 'Constants', 'Point', 'Vector', 'GameLoop', 'World']
# convenience imports
import entity, gameloop, util, world, mixin, music, point, sound, text, \
tiledimage, tilemap, tween, tweenfunc, emitter, effects
from gameloop import Game, GameLoop
from world import World
from point import Point, Vector
from entity import Image, Entity
Constants = Game.Constants
"""A number of useful constants, such as keycodes, event types, and display flags."""
| # imports for Pyrge package
__all__ = ['effects',
'emitter',
'entity',
'gameloop',
'mixin',
'music',
'point',
'quadtree',
'sound',
'spritesheet',
'text',
'tiledimage',
'tilemap',
'tween',
'tweenfunc',
'util',
'world',
'Game', 'Constants', 'Point', 'Vector',
'GameLoop', 'World', 'Image', 'Entity']
# convenience imports
import entity, gameloop, util, world, mixin, music, point, sound, text, \
tiledimage, tilemap, tween, tweenfunc, emitter, effects
from gameloop import Game, GameLoop
from world import World
from point import Point, Vector
from entity import Image, Entity
Constants = Game.Constants
"""A number of useful constants, such as keycodes, event types, and display flags."""
| Put Image and Entity into __all__ | Put Image and Entity into __all__
| Python | lgpl-2.1 | momikey/pyrge |
c01a858306d31a5b12e42f30ff01bdbdb2240092 | froide/publicbody/tests.py | froide/publicbody/tests.py | """
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
| from django.test import TestCase
from django.core.urlresolvers import reverse
from publicbody.models import PublicBody
class PublicBodyTest(TestCase):
fixtures = ['auth.json', 'publicbodies.json', 'foirequest.json']
def test_web_page(self):
response = self.client.get(reverse('publicbody-list'))
self.assertEqual(response.status_code, 200)
pb = PublicBody.objects.all()[0]
response = self.client.get(reverse('publicbody-show', kwargs={"slug": pb.slug}))
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse('publicbody-show_json', kwargs={"pk": pb.pk, "format": "json"}))
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertIn('"name":', response.content)
self.assertIn('"laws": [{', response.content)
response = self.client.get(reverse('publicbody-show_json', kwargs={"slug": pb.slug, "format": "json"}))
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
def test_csv(self):
csv = PublicBody.export_csv()
self.assertTrue(csv)
def test_search(self):
response = self.client.get(reverse('publicbody-search')+"?q=umwelt")
self.assertIn("Umweltbundesamt", response.content)
self.assertEqual(response['Content-Type'], 'application/json')
| Test public body showing, json view and csv export | Test public body showing, json view and csv export | Python | mit | okfse/froide,ryankanno/froide,catcosmo/froide,ryankanno/froide,okfse/froide,LilithWittmann/froide,okfse/froide,LilithWittmann/froide,ryankanno/froide,CodeforHawaii/froide,stefanw/froide,stefanw/froide,LilithWittmann/froide,CodeforHawaii/froide,catcosmo/froide,catcosmo/froide,stefanw/froide,ryankanno/froide,fin/froide,fin/froide,catcosmo/froide,okfse/froide,fin/froide,LilithWittmann/froide,stefanw/froide,catcosmo/froide,ryankanno/froide,LilithWittmann/froide,fin/froide,stefanw/froide,CodeforHawaii/froide,CodeforHawaii/froide,CodeforHawaii/froide,okfse/froide |
076f8cf27d3a1b52a1b597e224d23bd2ba18fcd7 | kalamarsite.py | kalamarsite.py | import os
import kalamar.site
from kalamar.access_point.cache import Cache
from kalamar.access_point.xml.rest import Rest, RestProperty, TITLE
from kalamar.access_point.filesystem import FileSystem
from sitenco import PROJECTS_PATH
page = Rest(
FileSystem(
PROJECTS_PATH, r'(.*)/pages/(.*)\.rst', ('project', 'page')),
[('title', RestProperty(unicode, TITLE))], 'content')
news = Rest(
FileSystem(
PROJECTS_PATH, r'(.*)/news/(.*)/(.*)\.rst',
('project', 'writer', 'datetime')),
[('title', RestProperty(unicode, TITLE))], 'content')
tutorial = Cache(
Rest(
FileSystem(
PROJECTS_PATH, r'(.*)/tutorials/(.*)\.rst', ('project', 'tutorial')),
[('title', RestProperty(unicode, TITLE)),
('abstract', RestProperty(unicode, '//topic/paragraph'))], 'content'))
SITE = kalamar.site.Site()
SITE.register('page', page)
SITE.register('news', news)
SITE.register('tutorial', tutorial)
| import os
import kalamar.site
from kalamar.access_point.cache import Cache
from kalamar.access_point.xml.rest import Rest, RestProperty, TITLE
from kalamar.access_point.filesystem import FileSystem
from sitenco import PROJECTS_PATH
page = Rest(
FileSystem(
PROJECTS_PATH, r'([a-z]*)/pages/(.*)\.rst', ('project', 'page')),
[('title', RestProperty(unicode, TITLE))], 'content')
news = Rest(
FileSystem(
PROJECTS_PATH, r'([a-z]*)/news/(.*)/(.*)\.rst',
('project', 'writer', 'datetime')),
[('title', RestProperty(unicode, TITLE))], 'content')
tutorial = Cache(
Rest(
FileSystem(
PROJECTS_PATH,
r'([a-z]*)/tutorials/(.*)\.rst', ('project', 'tutorial')),
[('title', RestProperty(unicode, TITLE)),
('abstract', RestProperty(unicode, '//topic/paragraph'))], 'content'))
SITE = kalamar.site.Site()
SITE.register('page', page)
SITE.register('news', news)
SITE.register('tutorial', tutorial)
| Use [a-z]* pattern to match project ids | Use [a-z]* pattern to match project ids
| Python | bsd-3-clause | Kozea/sitenco |
696010e636f7e30ba331b103ba051422780edf4b | bluebottle/funding/utils.py | bluebottle/funding/utils.py | from babel.numbers import get_currency_name, get_currency_symbol
from bluebottle.utils.exchange_rates import convert
from django.db.models import Sum
from djmoney.money import Money
from bluebottle.funding.models import PaymentProvider
def get_currency_settings():
result = []
for provider in PaymentProvider.objects.all():
for cur in provider.paymentcurrency_set.all():
result.append({
'provider': provider.name,
'providerName': provider.title,
'code': cur.code,
'name': get_currency_name(cur.code),
'symbol': get_currency_symbol(cur.code).replace('US$', '$').replace('NGN', '₦'),
'defaultAmounts': [
cur.default1,
cur.default2,
cur.default3,
cur.default4,
],
'minAmount': cur.min_amount,
'maxAmount': cur.max_amount
})
return result
def calculate_total(queryset, target='EUR'):
totals = queryset.values(
'donor__amount_currency'
).annotate(
total=Sum('donor__amount')
).order_by('-created')
amounts = [Money(tot['total'], tot['donor__amount_currency']) for tot in totals]
amounts = [convert(amount, target) for amount in amounts]
return sum(amounts) or Money(0, target)
| from babel.numbers import get_currency_name, get_currency_symbol
from bluebottle.utils.exchange_rates import convert
from django.db.models import Sum
from djmoney.money import Money
from bluebottle.funding.models import PaymentProvider
def get_currency_settings():
result = []
for provider in PaymentProvider.objects.all():
for cur in provider.paymentcurrency_set.all():
result.append({
'provider': provider.name,
'providerName': provider.title,
'code': cur.code,
'name': get_currency_name(cur.code),
'symbol': get_currency_symbol(cur.code).replace('US$', '$').replace('NGN', '₦'),
'defaultAmounts': [
cur.default1,
cur.default2,
cur.default3,
cur.default4,
],
'minAmount': cur.min_amount,
'maxAmount': cur.max_amount
})
return result
def calculate_total(queryset, target='EUR'):
totals = queryset.values(
'donor__payout_amount_currency'
).annotate(
total=Sum('donor__payout_amount')
).order_by('-created')
amounts = [Money(tot['total'], tot['donor__payout_amount_currency']) for tot in totals]
amounts = [convert(amount, target) for amount in amounts]
return sum(amounts) or Money(0, target)
| Use payout amount to calculate total | Use payout amount to calculate total
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle |
1f250c6113ed69dc3373afbc40a93bdc7d8e7894 | pages_scrape.py | pages_scrape.py | import logging
import requests
def scrape(url, extractor):
"""
Function to request and parse a given URL. Returns only the "relevant"
text.
Parameters
----------
url : String.
URL to request and parse.
extractor : Goose class instance.
An instance of Goose that allows for parsing of content.
Returns
-------
text : String.
Parsed text from the specified website.
meta : String.
Parsed meta description of an article. Usually equivalent to the
lede.
"""
logger = logging.getLogger('scraper_log')
try:
headers = {'User-Agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36"}
page = requests.get(url, headers=headers)
try:
article = extractor.extract(raw_html=page.content)
text = article.cleaned_text
meta = article.meta_description
return text, meta
#Generic error catching is bad
except Exception, e:
print 'There was an error. Check the log file for more information.'
logger.warning('Problem scraping URL: {}. {}.'.format(url, e))
except Exception, e:
print 'There was an error. Check the log file for more information.'
logger.warning('Problem requesting url: {}. {}'.format(url, e))
| import logging
import requests
def scrape(url, extractor):
"""
Function to request and parse a given URL. Returns only the "relevant"
text.
Parameters
----------
url : String.
URL to request and parse.
extractor : Goose class instance.
An instance of Goose that allows for parsing of content.
Returns
-------
text : String.
Parsed text from the specified website.
meta : String.
Parsed meta description of an article. Usually equivalent to the
lede.
"""
logger = logging.getLogger('scraper_log')
try:
headers = {'User-Agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36"}
page = requests.get(url, headers=headers)
try:
try:
article = extractor.extract(raw_html=page.content)
except UnicodeDecodeError:
article = extractor.extract(raw_html=page.content.decode('utf-8',
errors='replace'))
text = article.cleaned_text
meta = article.meta_description
return text, meta
#Generic error catching is bad
except Exception, e:
print 'There was an error. Check the log file for more information.'
logger.warning('Problem scraping URL: {}. {}.'.format(url, e))
except Exception, e:
print 'There was an error. Check the log file for more information.'
logger.warning('Problem requesting url: {}. {}'.format(url, e))
| Handle UTF errors with invalid bytes. | Handle UTF errors with invalid bytes.
| Python | mit | openeventdata/scraper,chilland/scraper |
38746e4f4891f7ad87ce678776be15556d1db449 | gcl/to_json.py | gcl/to_json.py | import argparse
import json
import sys
import gcl
from gcl import query
from gcl import util
def main(argv=None, stdin=None):
parser = argparse.ArgumentParser(description='Convert (parts of) a GCL model file to JSON.')
parser.add_argument('file', metavar='FILE', type=str, nargs='?',
help='File to parse')
parser.add_argument('selectors', metavar='SELECTOR', type=str, nargs='*',
help='Subnodes to convert. The first selector will be treated as the root of the printed output.')
args = parser.parse_args(argv or sys.argv[1:])
try:
if args.file and args.file != '-':
model = gcl.load(args.file)
else:
model = gcl.loads((stdin or sys.stdin).read(), filename='<stdin>')
sels = query.GPath(args.selectors)
if not sels.everything():
model = sels.select(model).deep()
plain = util.to_python(model)
sys.stdout.write(json.dumps(plain))
except (gcl.ParseError, RuntimeError) as e:
sys.stderr.write(str(e) + '\n')
sys.exit(1)
| import argparse
import json
import sys
import gcl
from gcl import query
from gcl import util
def select(dct, path):
for part in path:
if not hasattr(dct, 'keys'):
raise RuntimeError('Value %r cannot be indexed with %r' % (dct, part))
if part not in dct:
raise RuntimeError('Value %r has no key %r' % (dct, part))
dct = dct[part]
return dct
def main(argv=None, stdin=None):
parser = argparse.ArgumentParser(description='Convert (parts of) a GCL model file to JSON.')
parser.add_argument('file', metavar='FILE', type=str, nargs='?',
help='File to parse')
parser.add_argument('selectors', metavar='SELECTOR', type=str, nargs='*',
help='Select nodes to include in the JSON.')
parser.add_argument('--root', '-r', metavar='PATH', type=str, default='',
help='Use the indicated root path as the root of the output JSON object (like a.b.c but without wildcards)')
args = parser.parse_args(argv or sys.argv[1:])
try:
if args.file and args.file != '-':
model = gcl.load(args.file)
else:
model = gcl.loads((stdin or sys.stdin).read(), filename='<stdin>')
sels = query.GPath(args.selectors)
if not sels.everything():
model = sels.select(model).deep()
plain = util.to_python(model)
selectors = args.root.split('.') if args.root else []
selected = select(plain, selectors)
sys.stdout.write(json.dumps(selected))
except (gcl.ParseError, RuntimeError) as e:
sys.stderr.write(str(e) + '\n')
sys.exit(1)
| Add proper root selector to gcl2json | Add proper root selector to gcl2json
| Python | mit | rix0rrr/gcl |
3c3e9b5f584c23c9359ca9dce71b89635fffd043 | LiSE/LiSE/tests/test_load.py | LiSE/LiSE/tests/test_load.py | import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
inittest(eng, kobold_pos=(9, 9))
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
eng.character['physical'].thing['kobold']['location'] = (3, 3)
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
assert eng._things_cache.keyframe['physical']['trunk'][0][tick0]\
!= eng._things_cache.keyframe['physical']['trunk'][1][tick1]
| import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
inittest(eng)
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
del eng.character['physical'].place[3, 3]
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
assert eng._nodes_cache.keyframe['physical', ]['trunk'][0][tick0]\
!= eng._nodes_cache.keyframe['physical', ]['trunk'][1][tick1]
| Make test_multi_keyframe demonstrate what it's supposed to | Make test_multi_keyframe demonstrate what it's supposed to
I was testing a cache that wasn't behaving correctly for
unrelated reasons.
| Python | agpl-3.0 | LogicalDash/LiSE,LogicalDash/LiSE |
972cb7c234729d2ce8bbab0937f8efbfe18a2eeb | lab_members/models.py | lab_members/models.py | from django.db import models
class Position(models.Model):
class Meta:
verbose_name = "Position"
verbose_name_plural = "Positions"
title = models.CharField(u'title',
blank=False,
default='',
help_text=u'Please enter a title for this position',
max_length=64,
unique=True,
)
def __str__(self):
pass
class Scientist(models.Model):
class Meta:
verbose_name = "Scientist"
verbose_name_plural = "Scientists"
full_name = models.CharField(u'full name',
blank=False,
default='',
help_text=u'Please enter a full name for this scientist',
max_length=64,
unique=True,
)
slug = models.SlugField(u'slug',
blank=False,
default='',
help_text=u'Please enter a unique slug for this scientist',
max_length=64,
)
title = models.ForeignKey('lab_members.Position',
blank=True,
default=None,
help_text=u'Please specify a title for this scientist',
null=True,
)
def __str__(self):
pass
| from django.db import models
class Position(models.Model):
class Meta:
verbose_name = "Position"
verbose_name_plural = "Positions"
title = models.CharField(u'title',
blank=False,
default='',
help_text=u'Please enter a title for this position',
max_length=64,
unique=True,
)
def __str__(self):
return self.title
class Scientist(models.Model):
class Meta:
verbose_name = "Scientist"
verbose_name_plural = "Scientists"
full_name = models.CharField(u'full name',
blank=False,
default='',
help_text=u'Please enter a full name for this scientist',
max_length=64,
unique=True,
)
slug = models.SlugField(u'slug',
blank=False,
default='',
help_text=u'Please enter a unique slug for this scientist',
max_length=64,
)
title = models.ForeignKey('lab_members.Position',
blank=True,
default=None,
help_text=u'Please specify a title for this scientist',
null=True,
)
def __str__(self):
return self.full_name
| Fix error: __str__ returned non-string (type NoneType) | Fix error: __str__ returned non-string (type NoneType)
| Python | bsd-3-clause | mfcovington/django-lab-members,mfcovington/django-lab-members,mfcovington/django-lab-members |
9ad049bdac489e5f500f8bf8ec0cd615ccacadbf | stack/logs.py | stack/logs.py | from troposphere import Join, iam, logs
from .common import arn_prefix
from .template import template
container_log_group = logs.LogGroup(
"ContainerLogs",
template=template,
RetentionInDays=365,
DeletionPolicy="Retain",
)
logging_policy = iam.Policy(
PolicyName="LoggingPolicy",
PolicyDocument=dict(
Statement=[dict(
Effect="Allow",
Action=[
"logs:Create*",
"logs:PutLogEvents",
],
Resource=Join("", [
arn_prefix,
":logs:*:*:*", # allow logging to any log group
]),
)],
),
)
| from troposphere import Join, iam, logs
from .common import arn_prefix
from .template import template
container_log_group = logs.LogGroup(
"ContainerLogs",
template=template,
RetentionInDays=365,
DeletionPolicy="Retain",
)
logging_policy = iam.Policy(
PolicyName="LoggingPolicy",
PolicyDocument=dict(
Statement=[dict(
Effect="Allow",
Action=[
"logs:Create*",
"logs:PutLogEvents",
# Needed by aws-for-fluent-bit:
"logs:DescribeLogGroups",
"logs:DescribeLogStreams",
],
Resource=Join("", [
arn_prefix,
":logs:*:*:*", # allow logging to any log group
]),
)],
),
)
| Add logging permissions needed by aws-for-fluent-bit | Add logging permissions needed by aws-for-fluent-bit | Python | mit | tobiasmcnulty/aws-container-basics,caktus/aws-web-stacks |
e1ad3190e124163c0e7e0e7fc03cfea6f43f0cf8 | stack/vpc.py | stack/vpc.py | from troposphere.ec2 import (
VPC,
)
from .template import template
vpc = VPC(
"Vpc",
template=template,
CidrBlock="10.0.0.0/16",
)
| from troposphere import (
Ref,
)
from troposphere.ec2 import (
InternetGateway,
VPC,
VPCGatewayAttachment,
)
from .template import template
vpc = VPC(
"Vpc",
template=template,
CidrBlock="10.0.0.0/16",
)
# Allow outgoing to outside VPC
internet_gateway = InternetGateway(
"InternetGateway",
template=template,
)
# Attach Gateway to VPC
VPCGatewayAttachment(
"GatewayAttachement",
template=template,
VpcId=Ref(vpc),
InternetGatewayId=Ref(internet_gateway),
)
| Attach an `InternetGateway` to the `VPC` | Attach an `InternetGateway` to the `VPC`
| Python | mit | tobiasmcnulty/aws-container-basics,caktus/aws-web-stacks |
92aeffe058bfd724309ddcdbdab9226057074afe | masters/master.chromium.lkgr/master_source_cfg.py | masters/master.chromium.lkgr/master_source_cfg.py | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.changes.pb import PBChangeSource
def Update(config, active_master, c):
# Polls config.Master.trunk_url for changes
c['change_source'].append(PBChangeSource())
| # Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.url_poller import URLPoller
LKGR_URL = 'https://chromium-status.appspot.com/lkgr'
def Update(config, active_master, c):
c['change_source'].append(
URLPoller(changeurl=LKGR_URL, pollInterval=300,
category='lkgr', include_revision=True))
| Switch master.chromium.lkgr to poll the chromium-status app. | Switch master.chromium.lkgr to poll the chromium-status app.
Using a PBChangeSource is silly, opaque, and potentially dangerous. We already
have a URLPoller for exactly this use-case (already in use by chromium.endure)
so let's use it here too. This also has the advantage of making sure
the LKGR waterfall picks up *all* updates to LKGR, including manual ones.
R=iannucci@chromium.org, phajdan.jr@chromium.org
BUG=366954
Review URL: https://codereview.chromium.org/255753002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@266093 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build |
bdbb64dc494ef514deccf6e81423886df19b0b29 | markerutils.py | markerutils.py | PI = 3.141592654
SAMPLE_TIME = 5
FRAMES_PER_SEC = 30
SEC_PER_MIN = 60
RATE_RANGE = (95, 125) # 100 - 120 BPM (plus some error in measurement)
DEPTH_RANGE = (2, 5) # At least 2 in (plus some error in measurement)
RECOIL_THRESH = 0.2 # Allow for FULL chest recoil
# Color ranges
GREEN_COLOR_MIN = (38, 68, 87)
GREEN_COLOR_MAX = (72, 183, 255)
VIOLET_COLOR_MIN = (140, 110, 21)
VIOLET_COLOR_MAX = (206, 255, 236)
YELLOW_COLOR_MIN = (10, 110, 96)
YELLOW_COLOR_MAX = (47, 255, 255)
def ftoi_point(point):
return int(point[0]), int(point[1])
def get_ellipse_size(ellipse):
return max(ellipse[1][0], ellipse[1][1])
def ellipse_area(ellipse):
return ellipse[1][0] * ellipse[1][1] * PI / 4
def get_pixel_size(marker):
return get_ellipse_size(marker[1])
| PI = 3.141592654
SAMPLE_TIME = 5
FRAMES_PER_SEC = 30
SEC_PER_MIN = 60
RATE_RANGE = (95, 125) # 100 - 120 BPM (plus some error in measurement)
DEPTH_RANGE = (1.95, 5) # At least 2 in (plus some error in measurement)
RECOIL_THRESH = 0.2 # Allow for FULL chest recoil
# Color ranges
GREEN_COLOR_MIN = (38, 68, 87)
GREEN_COLOR_MAX = (72, 183, 255)
VIOLET_COLOR_MIN = (140, 110, 21)
VIOLET_COLOR_MAX = (206, 255, 236)
YELLOW_COLOR_MIN = (10, 110, 96)
YELLOW_COLOR_MAX = (47, 255, 255)
def ftoi_point(point):
return int(point[0]), int(point[1])
def get_ellipse_size(ellipse):
return max(ellipse[1][0], ellipse[1][1])
def ellipse_area(ellipse):
return ellipse[1][0] * ellipse[1][1] * PI / 4
def get_pixel_size(marker):
return get_ellipse_size(marker[1])
| Add a wider margin of accuracy | Add a wider margin of accuracy
| Python | mit | Isaac-W/cpr-vision-measurement,Isaac-W/cpr-vision-measurement,Isaac-W/cpr-vision-measurement |
7fa20f228a673ee983af47910f10851c126a9308 | src/foremast/plugin_manager.py | src/foremast/plugin_manager.py | from pluginbase import PluginBase
class PluginManager:
def __init__(self, paths, provider):
self.paths = [paths]
self.provider = provider
plugin_base = PluginBase(package='foremast.plugins')
self.plugin_source = plugin_base.make_plugin_source(searchpath=self.paths)
def plugins(self):
for plugin in self.plugin_source.list_plugins():
yield plugin
def load(self):
return self.plugin_source.load_plugin(self.provider)
| """Manager to handle plugins"""
from pluginbase import PluginBase
class PluginManager:
"""Class to manage and create Spinnaker applications
Args:
paths (str): Path of plugin directory.
provider (str): The name of the cloud provider.
"""
def __init__(self, paths, provider):
self.paths = [paths]
self.provider = provider
plugin_base = PluginBase(package='foremast.plugins')
self.plugin_source = plugin_base.make_plugin_source(searchpath=self.paths)
def plugins(self):
"""List of all plugins available."""
for plugin in self.plugin_source.list_plugins():
yield plugin
def load(self):
"""Load the plugin object."""
return self.plugin_source.load_plugin(self.provider)
| Add docstring to plugin manager | chore: Add docstring to plugin manager
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast |
a2ced7a752c033cef1a1da1fb246b99f0895f86a | src/objectdictionary.py | src/objectdictionary.py | import collections
class ObjectDictionary(collections.Mapping):
def __init__(self):
self.names = {}
self.ids = {}
@classmethod
def initialize(edsPath):
pass
def __setitem__(self,key,value):
pass
def __getitem__(self,key):
pass
def __iter__():
pass
def __len__():
pass
if __name__ == '__main__':
cow = ObjectDictionary()
| import collections
class ObjectDictionary(collections.Mapping):
def __init__(self):
self.names = {}
self.ids = {}
@classmethod
def initialize(edsPath):
pass
def __setitem__(self,key,value):
if type(key) is str:
self.names[key] = value
else:
self.ids[key] = value
def __getitem__(self,key):
if type(key) is str:
return self.names[key]
else:
return self.ids[key]
def __iter__(self):
for objitem in self.ids:
yield objitem
def __len__(self):
return len(self.ids)
if __name__ == '__main__':
test = ObjectDictionary()
| Add Mapping methods to ObjectDictionary | Add Mapping methods to ObjectDictionary
| Python | mit | aceofwings/Evt-Gateway,aceofwings/Evt-Gateway |
89bf3b348d9849a936cf4f433d0156242fda7f72 | robert/__init__.py | robert/__init__.py | """
Entry point and the only view we have.
"""
from .article_utils import get_articles
from flask import Flask, render_template
from os import path
app = Flask(__name__)
config_path = path.join(path.dirname(__file__), 'config.py')
app.config.from_pyfile(config_path)
@app.route('/')
def frontpage():
articles = get_articles()
context = {
'articles': articles,
'debug': app.config.get('DEBUG', False),
}
return render_template('base.html', **context)
| """
Entry point and the only view we have.
"""
from .article_utils import get_articles
from flask import Flask, render_template
from os import path
app = Flask(__name__)
config_path = path.abspath(path.join(path.dirname(__file__), 'config.py'))
app.config.from_pyfile(config_path)
@app.route('/')
def frontpage():
articles = get_articles()
context = {
'articles': articles,
'debug': app.config.get('DEBUG', False),
}
return render_template('base.html', **context)
| Use absolute path when loading config. | Use absolute path when loading config.
| Python | mit | thusoy/robertblag,thusoy/robertblag,thusoy/robertblag |
c388301a3d6adfe50261557c6a2bf624c257c3a2 | roboime/options.py | roboime/options.py | # -*- coding: utf-8 -*-
"""
General options during execution
"""
#Position Log filename. Use None to disable.
position_log_filename = "math/pos_log.txt"
#position_log_filename = None
#Command and Update Log filename. Use None to disable.
cmdupd_filename = "math/commands.txt"
#cmdupd_filename = None
#Gaussian noise addition variances
noise_var_x = 3.E-5
noise_var_y = 3.E-5
noise_var_angle = 1.
# Process error estimate. The lower (higher negative exponent), more the filter
# becomes like a Low-Pass Filter (higher confidence in the model prediction).
Q = 1e-5
# Measurement error variances (for the R matrix).
# The higher (lower negative exponent), more the filter becomes like a
# Low-Pass Filter (higher possible measurement error).
R_var_x = 3.E-5
R_var_y = 3.E-5
R_var_angle = 3
| # -*- coding: utf-8 -*-
"""
General options during execution
"""
#Position Log filename. Use None to disable.
position_log_filename = "math/pos_log.txt"
#position_log_filename = None
#Command and Update Log filename. Use None to disable.
cmdupd_filename = "math/commands.txt"
#cmdupd_filename = None
#Gaussian noise addition variances
noise_var_x = 3.E-5
noise_var_y = 3.E-5
noise_var_angle = 1.
# Process error estimate. The lower (higher negative exponent), more the filter
# becomes like a Low-Pass Filter (higher confidence in the model prediction).
Q = 1e-5
# Measurement error variances (for the R matrix).
# The higher (lower negative exponent), more the filter becomes like a
# Low-Pass Filter (higher possible measurement error).
R_var_x = 3.E-5
R_var_y = 3.E-5
R_var_angle = 1e-5
| Fix problem of robots going wrong ways with Kalman (problem was really bad R_var_angle=>too big!) | Fix problem of robots going wrong ways with Kalman (problem was really bad R_var_angle=>too big!)
| Python | agpl-3.0 | roboime/pyroboime |
4c63cc67e0dddaec396a1e955645e8c00755d299 | oidc_provider/management/commands/creatersakey.py | oidc_provider/management/commands/creatersakey.py | from Cryptodome.PublicKey import RSA
from django.core.management.base import BaseCommand
from oidc_provider.models import RSAKey
class Command(BaseCommand):
help = 'Randomly generate a new RSA key for the OpenID server'
def handle(self, *args, **options):
try:
key = RSA.generate(1024)
rsakey = RSAKey(key=key.exportKey('PEM').decode('utf8'))
rsakey.save()
self.stdout.write(u'RSA key successfully created with kid: {0}'.format(rsakey.kid))
except Exception as e:
self.stdout.write('Something goes wrong: {0}'.format(e))
| from Cryptodome.PublicKey import RSA
from django.core.management.base import BaseCommand
from oidc_provider.models import RSAKey
class Command(BaseCommand):
help = 'Randomly generate a new RSA key for the OpenID server'
def handle(self, *args, **options):
try:
key = RSA.generate(2048)
rsakey = RSAKey(key=key.exportKey('PEM').decode('utf8'))
rsakey.save()
self.stdout.write(u'RSA key successfully created with kid: {0}'.format(rsakey.kid))
except Exception as e:
self.stdout.write('Something goes wrong: {0}'.format(e))
| Increment RSA key size to 2048. | Enhancement: Increment RSA key size to 2048.
It seems like many lead institutions related with security are
recommending a minimum key length of 112-bits since 2013.
In order to achieve that, a RSA key size of 2048 (or more) is required.
| Python | mit | ByteInternet/django-oidc-provider,juanifioren/django-oidc-provider,ByteInternet/django-oidc-provider,juanifioren/django-oidc-provider |
737cd238308a4584ed296db119ed9eaa132fa2ba | item/models.py | item/models.py | from django.db import models
# Create your models here.
class Item(models.Model):
"""
Fields-
1. id = models.AutoField(primary_key=True) is created by default
2. Two Foreign Keys
Relationships-
Many to Many with Interest Group
Many to Many with Box
Ternary Relationship with Supplier and Orders
Many to Many with Selling Cycle
"""
item_name = models.CharField(max_length=40)
item_description = models.CharField(max_length=500)
item_quantity = models.IntegerField(default=0)
price_per_item = models.DecimalField(max_length=6, max_digits=2)
# TODO: Need Supplier ID and Interest ID
| from django.db import models
# Create your models here.
class Item(models.Model):
"""
Fields-
1. id = models.AutoField(primary_key=True) is created by default
2. Two Foreign Keys
Relationships-
Many to Many with Interest Group
Many to Many with Box
Ternary Relationship with Supplier and Orders
Many to Many with Selling Cycle
"""
item_name = models.CharField(max_length=40)
item_description = models.CharField(max_length=500)
item_quantity = models.IntegerField(default=0)
price_per_item = models.DecimalField(max_digits=6, decimal_places=2)
# TODO: Need Supplier ID and Interest ID
| Fix Item Model to have max_digits and decimal_places | Fix Item Model to have max_digits and decimal_places
| Python | mit | stevetu717/Databaes,KamLii/Databaes,stevetu717/Databaes,KamLii/Databaes,stevetu717/Databaes,KamLii/Databaes,stevetu717/Databaes,KamLii/Databaes |
e05a6c95d63a0e33553b0455ad4f35fb816f159a | qr_code/apps.py | qr_code/apps.py | from django.apps import AppConfig
class QrCodeConfig(AppConfig):
name = 'qr_code'
verbose_name = 'Django QR code'
| from django.apps import AppConfig
class QrCodeConfig(AppConfig):
name = 'qr_code'
verbose_name = 'Django QR Code'
| Fix title casing for verbose app name. | Fix title casing for verbose app name.
| Python | bsd-3-clause | dprog-philippe-docourt/django-qr-code,dprog-philippe-docourt/django-qr-code,dprog-philippe-docourt/django-qr-code |
2a8350afb3cd7c94d5c3beb891248a584120fe25 | kitsune/sumo/__init__.py | kitsune/sumo/__init__.py | class ProgrammingError(Exception):
"""Somebody made a mistake in the code."""
# Just importing monkeypatch does the trick - don't remove this line
from kitsune.sumo import monkeypatch
| class ProgrammingError(Exception):
"""Somebody made a mistake in the code."""
# Just importing monkeypatch does the trick - don't remove this line
from kitsune.sumo import monkeypatch
from south.signals import post_migrate
# Courtesy of http://devwithpassion.com/felipe/south-django-permissions/
def update_permissions_after_migration(app, **kwargs):
"""Update app permission just after every migration.
This is based on app django_extensions update_permissions management
command.
"""
from django.conf import settings
from django.db.models import get_app, get_models
from django.contrib.auth.management import create_permissions
create_permissions(
get_app(app), get_models(), 2 if settings.DEBUG else 0)
post_migrate.connect(update_permissions_after_migration)
| Add post_migrate receiver to create permissions | Add post_migrate receiver to create permissions
This should automatically create permissions for new models we add with
South migrations.
| Python | bsd-3-clause | safwanrahman/kitsune,H1ghT0p/kitsune,MziRintu/kitsune,YOTOV-LIMITED/kitsune,NewPresident1/kitsune,silentbob73/kitsune,safwanrahman/kitsune,mythmon/kitsune,feer56/Kitsune2,MziRintu/kitsune,H1ghT0p/kitsune,Osmose/kitsune,feer56/Kitsune2,philipp-sumo/kitsune,philipp-sumo/kitsune,brittanystoroz/kitsune,anushbmx/kitsune,NewPresident1/kitsune,MikkCZ/kitsune,iDTLabssl/kitsune,anushbmx/kitsune,mythmon/kitsune,safwanrahman/linuxdesh,dbbhattacharya/kitsune,asdofindia/kitsune,YOTOV-LIMITED/kitsune,safwanrahman/kitsune,rlr/kitsune,asdofindia/kitsune,H1ghT0p/kitsune,turtleloveshoes/kitsune,orvi2014/kitsune,rlr/kitsune,MikkCZ/kitsune,iDTLabssl/kitsune,silentbob73/kitsune,mozilla/kitsune,turtleloveshoes/kitsune,Osmose/kitsune,chirilo/kitsune,safwanrahman/linuxdesh,silentbob73/kitsune,chirilo/kitsune,philipp-sumo/kitsune,NewPresident1/kitsune,mozilla/kitsune,mythmon/kitsune,safwanrahman/linuxdesh,orvi2014/kitsune,Osmose/kitsune,YOTOV-LIMITED/kitsune,mythmon/kitsune,turtleloveshoes/kitsune,dbbhattacharya/kitsune,feer56/Kitsune1,rlr/kitsune,anushbmx/kitsune,turtleloveshoes/kitsune,orvi2014/kitsune,MziRintu/kitsune,safwanrahman/kitsune,rlr/kitsune,MikkCZ/kitsune,anushbmx/kitsune,asdofindia/kitsune,feer56/Kitsune1,MziRintu/kitsune,brittanystoroz/kitsune,orvi2014/kitsune,iDTLabssl/kitsune,MikkCZ/kitsune,brittanystoroz/kitsune,chirilo/kitsune,H1ghT0p/kitsune,NewPresident1/kitsune,mozilla/kitsune,feer56/Kitsune1,YOTOV-LIMITED/kitsune,Osmose/kitsune,dbbhattacharya/kitsune,mozilla/kitsune,brittanystoroz/kitsune,dbbhattacharya/kitsune,iDTLabssl/kitsune,silentbob73/kitsune,feer56/Kitsune2,feer56/Kitsune2,chirilo/kitsune,asdofindia/kitsune |
f0861ff6c817f1f683e69cf362336545ff3d9148 | ledger/admin.py | ledger/admin.py | from django.contrib import admin
from ledger.models import Account, Entry
class EntryAdmin(admin.ModelAdmin):
list_display = ['date', 'amount', 'details', 'debit_account', 'credit_account']
list_filter = ['date']
admin.site.register(Entry, EntryAdmin)
admin.site.register(Account)
| from django.contrib import admin
from ledger.models import Account, Entry
class EntryAdmin(admin.ModelAdmin):
list_display = ['date', 'amount', 'details', 'debit_account', 'credit_account']
list_filter = ['date', 'debit_account', 'credit_account']
search_fields = ['details', 'debit_account__name', 'credit_account__name']
ordering = ['-date']
admin.site.register(Entry, EntryAdmin)
admin.site.register(Account)
| Add a little more functionality to EntryAdmin | Add a little more functionality to EntryAdmin
| Python | mpl-2.0 | jackbravo/condorest-django,jackbravo/condorest-django,jackbravo/condorest-django |
1e7361f46f551a2e897040ae47b43cdd5263d328 | dataactcore/models/field.py | dataactcore/models/field.py | class FieldType:
""" Acts as an enum for field types """
INTEGER = "INTEGER"
TEXT = "TEXT"
class FieldConstraint:
""" Acts a an enum for field constraints """
NONE = ""
PRIMARY_KEY = "PRIMARY KEY"
NOT_NULL = "NOT NULL" | class FieldType:
""" Acts as an enum for field types """
INTEGER = "INTEGER"
TEXT = "TEXT"
| Remove FieldConstraint class (not used) | Remove FieldConstraint class (not used)
| Python | cc0-1.0 | fedspendingtransparency/data-act-broker-backend,fedspendingtransparency/data-act-broker-backend |
07ee6957d20a1c02b22ed5d91d20211506e7ca54 | partner_feeds/templatetags/partner_feed_tags.py | partner_feeds/templatetags/partner_feed_tags.py | from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
try:
partner = Partner.objects.get(name=name)
except Partner.DoesNotExist:
continue
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners | from django import template
from partner_feeds.models import Partner, Post
register = template.Library()
@register.assignment_tag
def get_partners(*partner_names):
"""
Given a list of partner names, return those partners with posts attached to
them in the order that they were passed to this function
"""
partners = list(Partner.objects.filter(name__in=partner_names))
for partner in partners:
partner.posts = Post.objects.filter(partner=partner)
partners.sort(key=lambda p: partner_names.index(p.name))
return partners
| Update `get_partners` assignment tag to reduce the number of queries | Update `get_partners` assignment tag to reduce the number of queries
Maintains the same interface so no other changes should be required | Python | bsd-2-clause | theatlantic/django-partner-feeds |
a3022c915e9dae7ee4d36e4d3d85e302ff2f056c | mozcal/urls.py | mozcal/urls.py | from django.conf import settings
from django.conf.urls.defaults import patterns, include
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from funfactory.monkeypatches import patch
patch()
from events.api import EventResource
event_resource = EventResource()
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
(r'', include('mozcal.events.urls')),
(r'^api/', include(event_resource.urls)),
(r'^admin/', include('mozcal.admin.urls')),
(r'^browserid/', include('django_browserid.urls')),
# Generate a robots.txt
(r'^robots\.txt$', lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow' ,
mimetype="text/plain")
)
# Uncomment the admin/doc line below to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/', include(admin.site.urls)),
)
## In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
| from django.conf import settings
from django.conf.urls.defaults import patterns, include
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from events.api import EventResource
event_resource = EventResource()
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
(r'', include('mozcal.events.urls')),
(r'^api/', include(event_resource.urls)),
(r'^admin/', include('mozcal.admin.urls')),
(r'^browserid/', include('django_browserid.urls')),
# Generate a robots.txt
(r'^robots\.txt$', lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow' ,
mimetype="text/plain")
)
# Uncomment the admin/doc line below to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/', include(admin.site.urls)),
)
## In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
| Add missing import for HttpResponse | Add missing import for HttpResponse
| Python | bsd-3-clause | yvan-sraka/wprevents,ppapadeas/wprevents,ppapadeas/wprevents,ppapadeas/wprevents,yvan-sraka/wprevents,yvan-sraka/wprevents,yvan-sraka/wprevents |
7dbc1359ea4fb1b725fd53869a218856e4dec701 | lswapi/httpie/__init__.py | lswapi/httpie/__init__.py | """
LswApi auth plugin for HTTPie.
"""
from json import loads, dumps
from time import time
from os import path
from lswapi import __auth_token_url__, __token_store__, fetch_access_token
from requests import post
from httpie.plugins import AuthPlugin
class LswApiAuth(object):
def __init__(self, client_id, client_secret):
self.client_id = client_id
self.client_secret = client_secret
def __call__(self, r):
if path.exists(__token_store__):
with open(__token_store__, 'r') as file:
token = loads(file.read())
if 'expires_at' in token and token['expires_at'] > time():
r.headers['Authorization'] = '{token_type} {access_token}'.format(**token)
return r
token = fetch_access_token(self.client_id, self.client_secret, __auth_token_url__)
with open(__token_store__, 'w') as file:
file.write(dumps(token))
r.headers['Authorization'] = '{token_type} {access_token}'.format(**token)
return r
class ApiAuthPlugin(AuthPlugin):
name = 'LswApi Oauth'
auth_type = 'lswapi'
description = 'LeaseWeb Api Oauth Authentication'
def get_auth(self, username, password):
return LswApiAuth(username, password)
| """
LswApi auth plugin for HTTPie.
"""
from json import loads, dumps
from time import time
from os import path
from lswapi import __auth_token_url__, __token_store__, fetch_access_token
from requests import post
from httpie.plugins import AuthPlugin
class LswApiAuth(object):
def __init__(self, client_id, client_secret):
self.client_id = client_id
self.client_secret = client_secret
def __call__(self, r):
if path.exists(__token_store__):
with open(__token_store__, 'r') as file:
token = loads(file.read())
if 'expires_at' in token and token['expires_at'] > time():
r.headers['Authorization'] = '{token_type} {access_token}'.format(**token)
return r
token = fetch_access_token(__auth_token_url__, self.client_id, self.client_secret)
with open(__token_store__, 'w') as file:
file.write(dumps(token))
r.headers['Authorization'] = '{token_type} {access_token}'.format(**token)
return r
class ApiAuthPlugin(AuthPlugin):
name = 'LswApi Oauth'
auth_type = 'lswapi'
description = 'LeaseWeb Api Oauth Authentication'
def get_auth(self, username, password):
return LswApiAuth(username, password)
| Fix for function signature change in 0.4.0 in fetch_access_token | Fix for function signature change in 0.4.0 in fetch_access_token
| Python | apache-2.0 | nrocco/lswapi |
c0ec6a6a799ab86562b07326eeaf21da4fd23dff | rejected/log.py | rejected/log.py | """
Logging Related Things
"""
import logging
class CorrelationFilter(logging.Formatter):
"""Filter records that have a correlation_id"""
def __init__(self, exists=None):
super(CorrelationFilter, self).__init__()
self.exists = exists
def filter(self, record):
if self.exists:
return hasattr(record, 'correlation_id')
return not hasattr(record, 'correlation_id')
class CorrelationAdapter(logging.LoggerAdapter):
"""A LoggerAdapter that appends the a correlation ID to the message
record properties.
"""
def __init__(self, logger, consumer):
self.logger = logger
self.consumer = consumer
def process(self, msg, kwargs):
"""Process the logging message and keyword arguments passed in to
a logging call to insert contextual information.
:param str msg: The message to process
:param dict kwargs: The kwargs to append
:rtype: (str, dict)
"""
kwargs['extra'] = {'correlation_id': self.consumer.correlation_id}
return msg, kwargs
| """
Logging Related Things
"""
import logging
class CorrelationFilter(logging.Formatter):
"""Filter records that have a correlation_id"""
def __init__(self, exists=None):
super(CorrelationFilter, self).__init__()
self.exists = exists
def filter(self, record):
if self.exists:
return hasattr(record, 'correlation_id')
return not hasattr(record, 'correlation_id')
class CorrelationAdapter(logging.LoggerAdapter):
"""A LoggerAdapter that appends the a correlation ID to the message
record properties.
"""
def __init__(self, logger, consumer):
self.logger = logger
self.consumer = consumer
def process(self, msg, kwargs):
"""Process the logging message and keyword arguments passed in to
a logging call to insert contextual information.
:param str msg: The message to process
:param dict kwargs: The kwargs to append
:rtype: (str, dict)
"""
kwargs['extra'] = {'correlation_id': self.consumer.correlation_id,
'consumer': self.consumer.name}
return msg, kwargs
| Add the consumer name to the extra values | Add the consumer name to the extra values
| Python | bsd-3-clause | gmr/rejected,gmr/rejected |
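Everything placed in the record's 'extra' dict becomes an attribute on the LogRecord, so a formatter can interpolate the new fields directly. A small demo against the adapter above; the fake consumer object is invented for illustration:

import logging

class FakeConsumer(object):
    name = 'demo-consumer'
    correlation_id = 'abc123'

logging.basicConfig(
    format='%(levelname)s %(consumer)s %(correlation_id)s %(message)s')
adapter = CorrelationAdapter(logging.getLogger('demo'), FakeConsumer())
adapter.warning('processed message')
# -> WARNING demo-consumer abc123 processed message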
63db2005911abae96eb170af0dd93093cbfeae38 | nimp/utilities/ue4.py | nimp/utilities/ue4.py | # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
import socket
import random
import string
import time
import contextlib
import shutil
import os
from nimp.utilities.build import *
from nimp.utilities.deployment import *
#---------------------------------------------------------------------------
def ue4_build(env):
vs_version = '12'
vcxproj = 'Engine/Intermediate/ProjectFiles/' + env.game + '.vcxproj'
if _ue4_generate_project() != 0:
log_error("[nimp] Error generating UE4 project files")
return False
if env.ue4_build_platform == 'PS4':
ps4_vcxproj = 'Engine/Intermediate/ProjectFiles/PS4MapFileUtil.vcxproj'
if not _ue4_build_project(env.solution, ps4_vcxproj, 'Win64',
env.configuration, vs_version, 'Build'):
log_error("[nimp] Could not build PS4MapFileUtil.exe")
return False
return _ue4_build_project(env.solution, vcxproj, env.ue4_build_platform,
env.configuration, vs_version, 'Build')
#---------------------------------------------------------------------------
def _ue4_generate_project():
return call_process('.', ['./GenerateProjectFiles.bat'])
#---------------------------------------------------------------------------
def _ue4_build_project(sln_file, project, build_platform, configuration, vs_version, target = 'Rebuild'):
return vsbuild(sln_file, build_platform, configuration, project, vs_version, target)
| # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
import socket
import random
import string
import time
import contextlib
import shutil
import os
from nimp.utilities.build import *
from nimp.utilities.deployment import *
#---------------------------------------------------------------------------
def ue4_build(env):
vs_version = '12'
if _ue4_generate_project() != 0:
log_error("[nimp] Error generating UE4 project files")
return False
if env.ue4_build_platform == 'PS4':
if not _ue4_build_project(env.solution, 'PS4MapFileUtil', 'Win64',
env.configuration, vs_version, 'Build'):
log_error("[nimp] Could not build PS4MapFileUtil.exe")
return False
return _ue4_build_project(env.solution, env.game, env.ue4_build_platform,
env.configuration, vs_version, 'Build')
#---------------------------------------------------------------------------
def _ue4_generate_project():
return call_process('.', ['./GenerateProjectFiles.bat'])
#---------------------------------------------------------------------------
def _ue4_build_project(sln_file, project, build_platform, configuration, vs_version, target = 'Rebuild'):
return vsbuild(sln_file, build_platform, configuration, project, vs_version, target)
| Build UE4 projects by name rather than by full path. | Build UE4 projects by name rather than by full path.
| Python | mit | dontnod/nimp |
11cb3adf0beb19abebbf8345b9244dbcc0f51ca7 | autopoke.py | autopoke.py | #!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
c2 = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
driver.get('https://facebook.com/pokes/')
sleep(0.5)
| #!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
print("Found exception, reloading page")
driver.get('https://facebook.com/pokes/')
sleep(0.5)
| Add notice on page reload | Add notice on page reload
| Python | mit | matthewbentley/autopoke |
ccfc5e8681eef5e382b6c31abce540cbe179f7b2 | tests/factories/user.py | tests/factories/user.py | # -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User, RoomHistoryEntry
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.SelfAttribute('room.address')
@factory.post_generation
def room_history_entries(self, create, extracted, **kwargs):
if self.room is not None:
# Set room history entry begin to registration date
rhe = RoomHistoryEntry.q.filter_by(user=self, room=self.room).one()
rhe.begins_at = self.registered_at
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
class UserWithMembershipFactory(UserFactory):
membership = factory.RelatedFactory('tests.factories.property.MembershipFactory', 'user')
| # -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User, RoomHistoryEntry
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.SelfAttribute('room.address')
@factory.post_generation
def room_history_entries(self, create, extracted, **kwargs):
if self.room is not None:
# Set room history entry begin to registration date
rhe = RoomHistoryEntry.q.filter_by(user=self, room=self.room).one()
rhe.begins_at = self.registered_at
for key, value in kwargs.items():
setattr(rhe, key, value)
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
class UserWithMembershipFactory(UserFactory):
membership = factory.RelatedFactory('tests.factories.property.MembershipFactory', 'user')
| Allow adjusting of RoomHistoryEntry attributes in UserFactory | Allow adjusting of RoomHistoryEntry attributes in UserFactory
| Python | apache-2.0 | agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft |
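With that loop in place, factory_boy's double-underscore convention reaches the history entry: any room_history_entries__<attr> keyword arrives in the hook's kwargs and is applied with setattr. Hypothetical usage, assuming an ends_at field exists on RoomHistoryEntry:

import datetime

user = UserFactory(
    room_history_entries__ends_at=datetime.datetime(2020, 1, 1))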
e86f62edb2edf9dd5d20eb2bf89b09c76807de50 | tests/cupy_tests/core_tests/test_array_function.py | tests/cupy_tests/core_tests/test_array_function.py | import unittest
import numpy
import six
import cupy
from cupy import testing
@testing.gpu
class TestArrayFunction(unittest.TestCase):
@testing.with_requires('numpy>=1.17.0')
def test_array_function(self):
a = numpy.random.randn(100, 100)
a_cpu = numpy.asarray(a)
a_gpu = cupy.asarray(a)
# The numpy call for both CPU and GPU arrays is intentional to test the
# __array_function__ protocol
qr_cpu = numpy.linalg.qr(a_cpu)
qr_gpu = numpy.linalg.qr(a_gpu)
if isinstance(qr_cpu, tuple):
for b_cpu, b_gpu in six.moves.zip(qr_cpu, qr_gpu):
self.assertEqual(b_cpu.dtype, b_gpu.dtype)
cupy.testing.assert_allclose(b_cpu, b_gpu, atol=1e-4)
else:
self.assertEqual(qr_cpu.dtype, qr_gpu.dtype)
cupy.testing.assert_allclose(qr_cpu, qr_gpu, atol=1e-4)
| import unittest
import numpy
import six
import cupy
from cupy import testing
@testing.gpu
class TestArrayFunction(unittest.TestCase):
@testing.with_requires('numpy>=1.17.0')
def test_array_function(self):
a = numpy.random.randn(100, 100)
a_cpu = numpy.asarray(a)
a_gpu = cupy.asarray(a)
# The numpy call for both CPU and GPU arrays is intentional to test the
# __array_function__ protocol
qr_cpu = numpy.linalg.qr(a_cpu)
qr_gpu = numpy.linalg.qr(a_gpu)
if isinstance(qr_cpu, tuple):
for b_cpu, b_gpu in six.moves.zip(qr_cpu, qr_gpu):
self.assertEqual(b_cpu.dtype, b_gpu.dtype)
cupy.testing.assert_allclose(b_cpu, b_gpu, atol=1e-4)
else:
self.assertEqual(qr_cpu.dtype, qr_gpu.dtype)
cupy.testing.assert_allclose(qr_cpu, qr_gpu, atol=1e-4)
@testing.numpy_cupy_equal()
def test_array_function_can_cast(self, xp):
return numpy.can_cast(xp.arange(2), 'f4')
@testing.numpy_cupy_equal()
def test_array_function_common_type(self, xp):
return numpy.common_type(xp.arange(2, dtype='f8'), xp.arange(2, dtype='f4'))
@testing.numpy_cupy_equal()
def test_array_function_result_type(self, xp):
return numpy.result_type(3, xp.arange(2, dtype='f8'))
| Add tests for NumPy _implementation usage | Add tests for NumPy _implementation usage
| Python | mit | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy |
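These tests lean on NEP 18: with NumPy >= 1.17, calling a numpy function on cupy arrays dispatches to CuPy's implementation. A minimal sketch, assuming a CUDA-capable environment:

import numpy
import cupy

a_gpu = cupy.asarray(numpy.random.randn(4, 4))
# Dispatched to CuPy via __array_function__; the result stays on the GPU.
q, r = numpy.linalg.qr(a_gpu)
print(type(q))  # expected: <class 'cupy.ndarray'>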
76a561db119874b17e9a266682e4e4505ba7ff50 | tools/perf/benchmarks/webrtc.py | tools/perf/benchmarks/webrtc.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import webrtc
from telemetry import benchmark
import page_sets
@benchmark.Disabled('reference')
class WebRTC(perf_benchmark.PerfBenchmark):
"""Obtains WebRTC metrics for a real-time video tests."""
test = webrtc.WebRTC
page_set = page_sets.WebrtcCasesPageSet
@classmethod
def Name(cls):
return 'webrtc.webrtc_cases'
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import webrtc
from telemetry import benchmark
import page_sets
# Disabled because the reference set becomes flaky with the new
# https:// page set introduced in http://crbug.com/523517.
# Try removing once the Chrome used for ref builds advances
# past blink commit pos 200986.
@benchmark.Disabled('reference')
class WebRTC(perf_benchmark.PerfBenchmark):
"""Obtains WebRTC metrics for a real-time video tests."""
test = webrtc.WebRTC
page_set = page_sets.WebrtcCasesPageSet
@classmethod
def Name(cls):
return 'webrtc.webrtc_cases'
| Add a comment saying why the WebRTC ref page set was disabled. | Add a comment saying why the WebRTC ref page set was disabled.
BUG=523517
CQ_EXTRA_TRYBOTS=tryserver.chromium.perf:linux_perf_bisect;tryserver.chromium.perf:mac_perf_bisect;tryserver.chromium.perf:win_perf_bisect;tryserver.chromium.perf:android_nexus5_perf_bisect
Review URL: https://codereview.chromium.org/1320333003
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#346388}
| Python | bsd-3-clause | CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend |
caa2ee0ec42b02c53fc9bd980fa088075d6d50d3 | base/components/social/youtube/admin.py | base/components/social/youtube/admin.py | from django.contrib import admin
from .models import Channel, Thumbnail, Video
class VideoInline(admin.StackedInline):
extra = 1
model = Video
class ThumbnailInline(admin.TabularInline):
extra = 1
model = Thumbnail
class ChannelAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('username',)}),
('Relations', {'fields': ('idol', 'group')}),
)
inlines = [VideoInline]
list_display = ['username', 'idol', 'group']
list_select_related = True
raw_id_fields = ('idol', 'group',)
autocomplete_lookup_fields = {'fk': ['idol', 'group']}
admin.site.register(Channel, ChannelAdmin)
class VideoAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('channel', 'ytid')}),
(None, {'fields': ('title', 'description', 'published', 'duration')}),
)
inlines = [ThumbnailInline]
list_display = ['title', 'channel', 'published', 'duration', 'ytid']
list_select_related = True
raw_id_fields = ('channel',)
autocomplete_lookup_fields = {'fk': ['channel']}
admin.site.register(Video, VideoAdmin)
| from django.contrib import admin
from .models import Channel, Thumbnail, Video
class VideoInline(admin.StackedInline):
extra = 1
model = Video
class ThumbnailInline(admin.TabularInline):
extra = 1
model = Thumbnail
class ChannelAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('username', 'ytid')}),
('Relations', {'fields': ('idol', 'group')}),
)
inlines = [VideoInline]
list_display = ['username', 'idol', 'group']
list_select_related = True
raw_id_fields = ('idol', 'group',)
autocomplete_lookup_fields = {'fk': ['idol', 'group']}
admin.site.register(Channel, ChannelAdmin)
class VideoAdmin(admin.ModelAdmin):
fieldsets = (
(None, {'fields': ('channel', 'ytid')}),
(None, {'fields': ('title', 'description', 'published', 'duration')}),
)
inlines = [ThumbnailInline]
list_display = ['title', 'channel', 'published', 'duration', 'ytid']
list_select_related = True
raw_id_fields = ('channel',)
autocomplete_lookup_fields = {'fk': ['channel']}
admin.site.register(Video, VideoAdmin)
| Add ytid to the ChannelAdmin. | Add ytid to the ChannelAdmin.
| Python | apache-2.0 | hello-base/web,hello-base/web,hello-base/web,hello-base/web |
a9a2c13cf947de9bc8ed50a38da5f7191b86ae23 | accounts/tests/test_views.py | accounts/tests/test_views.py | """accounts app unittests for views
"""
from django.test import TestCase
from django.urls import reverse
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should respond with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class SendLoginEmailTest(TestCase):
"""Tests for the view which sends the login email.
"""
def setUp(self):
self.url = reverse('send_login_email')
self.test_email = 'newvisitor@example.com'
def test_uses_emailsent_template(self):
"""The send_login_email url responds with login_email_sent template.
"""
response = self.client.post(self.url, data={'email': self.test_email})
self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
def test_get_request_yields_405(self):
"""Accessing the view via get request is not allowed.
"""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 405)
| """accounts app unittests for views
"""
from django.test import TestCase
from django.core import mail
from django.urls import reverse
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should respond with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class SendLoginEmailTest(TestCase):
"""Tests for the view which sends the login email.
"""
def setUp(self):
self.url = reverse('send_login_email')
self.test_email = 'newvisitor@example.com'
def test_uses_emailsent_template(self):
"""The send_login_email url responds with login_email_sent template.
"""
response = self.client.post(self.url, data={'email': self.test_email})
self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
def test_get_request_yields_405(self):
"""Accessing the view via get request is not allowed.
"""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 405)
def test_view_sends_token_email(self):
"""The view should send an email to the email address from post.
"""
self.client.post(self.url, data={'email': self.test_email})
self.assertEqual(len(mail.outbox), 1)
| Add trivial test for the view to send an email | Add trivial test for the view to send an email
| Python | mit | randomic/aniauth-tdd,randomic/aniauth-tdd |
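One minimal view that would satisfy all three tests, sketched here because the project's real implementation is not shown; every name below is an assumption:

from django.core.mail import send_mail
from django.shortcuts import render
from django.views.decorators.http import require_POST

@require_POST  # GET answers 405, matching test_get_request_yields_405
def send_login_email(request):
    email = request.POST['email']
    # With Django's test email backend this lands in mail.outbox.
    send_mail('Your login link', 'Use this link to log in.',
              'noreply@example.com', [email])
    return render(request, 'accounts/login_email_sent.html')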
018f8e7c7c69eefeb121c8552eb319b4b550f251 | backslash/error_container.py | backslash/error_container.py | from sentinels import NOTHING
class ErrorContainer(object):
def add_error(self, exception, exception_type, traceback, timestamp=NOTHING):
return self.client.api.call_function('add_error', {self._get_id_key(): self.id,
'exception': exception,
'exception_type': exception_type,
'traceback': traceback,
'timestamp': timestamp
})
def _get_id_key(self):
if type(self).__name__ == 'Test':
return 'test_id'
return 'session_id'
| from sentinels import NOTHING
class ErrorContainer(object):
def add_error(self, message, exception_type=NOTHING, traceback=NOTHING, timestamp=NOTHING):
return self.client.api.call_function('add_error', {self._get_id_key(): self.id,
'message': message,
'exception_type': exception_type,
'traceback': traceback,
'timestamp': timestamp
})
def _get_id_key(self):
if type(self).__name__ == 'Test':
return 'test_id'
return 'session_id'
| Unify errors and failures in API | Unify errors and failures in API
| Python | bsd-3-clause | vmalloc/backslash-python,slash-testing/backslash-python |
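The NOTHING default is the sentinel idiom: it lets an API distinguish "argument omitted" from an explicit None. An isolated illustration follows; the standalone function is invented to show the idiom and is not backslash's code:

from sentinels import NOTHING

def add_error(message, exception_type=NOTHING):
    payload = {'message': message}
    if exception_type is not NOTHING:   # an explicit None still passes through
        payload['exception_type'] = exception_type
    return payload

assert 'exception_type' not in add_error('boom')
assert add_error('boom', exception_type=None)['exception_type'] is None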
75a27c416effd2958182b1401e49d6613a28857d | sana_builder/webapp/models.py | sana_builder/webapp/models.py | from django.db import models
from django.contrib.auth.models import User
class Procedure(models.Model):
title = models.CharField(max_length=50)
author = models.CharField(max_length=50)
uuid = models.IntegerField(null=True)
version = models.CharField(max_length=50, null=True)
owner = models.ForeignKey(User, unique=True)
class Page(models.Model):
procedure = models.ForeignKey(Procedure)
| from django.db import models
from django.contrib.auth.models import User
class Procedure(models.Model):
title = models.CharField(max_length=50)
author = models.CharField(max_length=50)
uuid = models.IntegerField(null=True, unique=True)
version = models.CharField(max_length=50, null=True)
owner = models.ForeignKey(User, unique=True)
class Page(models.Model):
procedure = models.ForeignKey(Procedure)
| Make uuid on procedures unique | Make uuid on procedures unique
| Python | bsd-3-clause | SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder |
ad1203b9b93d1be499698807e2307413c20bb573 | cisco_olt_http/tests/test_operations.py | cisco_olt_http/tests/test_operations.py |
from cisco_olt_http import operations
|
from cisco_olt_http import operations
from cisco_olt_http.client import Client
def test_get_data():
client = Client('http://base-url')
show_equipment_op = operations.ShowEquipmentOp(client)
op_data = show_equipment_op.get_data()
assert op_data
| Add simple test for operation get_data | Add simple test for operation get_data
| Python | mit | beezz/cisco-olt-http-client,Vnet-as/cisco-olt-http-client |
f3eeb19249fae51a5537735cd5966596194cdc36 | pages/widgets_registry.py | pages/widgets_registry.py | __all__ = ('register_widget',)
from django.utils.translation import ugettext as _
class WidgetAlreadyRegistered(Exception):
"""
An attempt was made to register a widget for Django page CMS more
than once.
"""
pass
class WidgetNotFound(Exception):
"""
The requested widget was not found
"""
pass
registry = []
def register_widget(widget):
if widget in registry:
raise AlreadyRegistered(
_('The widget %s has already been registered.') % widget.__name__)
registry.append(widget)
def get_widget(name):
for widget in registry:
if widget.__name__ == name:
return widget
raise WidgetNotFound(
_('The widget %s has not been registered.') % name) | __all__ = ('register_widget',)
from django.utils.translation import ugettext as _
class WidgetAlreadyRegistered(Exception):
"""
An attempt was made to register a widget for Django page CMS more
than once.
"""
pass
class WidgetNotFound(Exception):
"""
The requested widget was not found
"""
pass
registry = []
def register_widget(widget):
if widget in registry:
raise WidgetAlreadyRegistered(_('The widget %s has already been registered.') % widget.__name__)
registry.append(widget)
def get_widget(name):
for widget in registry:
if widget.__name__ == name:
return widget
raise WidgetNotFound(
_('The widget %s has not been registered.') % name) | Fix widget registry exception handling code | Fix widget registry exception handling code
| Python | bsd-3-clause | batiste/django-page-cms,remik/django-page-cms,batiste/django-page-cms,oliciv/django-page-cms,oliciv/django-page-cms,akaihola/django-page-cms,remik/django-page-cms,remik/django-page-cms,akaihola/django-page-cms,oliciv/django-page-cms,pombredanne/django-page-cms-1,batiste/django-page-cms,akaihola/django-page-cms,pombredanne/django-page-cms-1,pombredanne/django-page-cms-1,remik/django-page-cms |
3ac86b4c058f920c9ec774c192d84050d61c8cc3 | tests/__init__.py | tests/__init__.py | # -*- coding: utf-8 -*-
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
os.remove(os.path.join("tests/resources", path))
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
| # -*- coding: utf-8 -*-
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
os.rmdir(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
| Fix bug; os.remove cannot remove directories | Fix bug; os.remove cannot remove directories
| Python | mit | koji-kojiro/hylang-hycc |
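Note that os.rmdir only removes empty directories; when a build can leave populated ones behind, shutil.rmtree is the usual tool. An alternative sketch of the same cleanup:

import os
import shutil

def clean():
    for name in os.listdir("tests/resources"):
        if name in ("hello.hy", "__init__.py"):
            continue
        path = os.path.join("tests/resources", name)
        if os.path.isdir(path):
            shutil.rmtree(path)   # handles non-empty trees too
        else:
            os.remove(path)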
f4cfad2edaa896b471f4f44b2a3fda2bd6b1bb49 | tests/conftest.py | tests/conftest.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
from flask import Flask, jsonify
@pytest.fixture
def app():
app = Flask(__name__)
@app.route('/ping')
def ping():
return jsonify(ping='pong')
return app
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
from flask import Flask, jsonify
@pytest.fixture
def app():
app = Flask(__name__)
@app.route('/')
def index():
return app.response_class('OK')
@app.route('/ping')
def ping():
return jsonify(ping='pong')
return app
| Add index route to test application | Add index route to test application
This endpoint uses to start :class:`LiveServer` instance with minimum
waiting timeout.
| Python | mit | amateja/pytest-flask |
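A hypothetical test against this fixture; pytest-flask's live_server polls the app until it answers, which is what the fast '/' endpoint speeds up. The url() helper follows current pytest-flask and may differ in older releases:

try:
    from urllib.request import urlopen   # Python 3
except ImportError:
    from urllib2 import urlopen          # Python 2

def test_server_is_up_and_running(live_server):
    response = urlopen(live_server.url('/ping'))
    assert response.code == 200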
dff2120a65daacfb1add8da604483f354abcefa2 | src/pygrapes/serializer/__init__.py | src/pygrapes/serializer/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from abstract import Abstract
from json import Json
from msgpack import MsgPack
__all__ = ['Abstract', 'Json', 'MsgPack']
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pygrapes.serializer.abstract import Abstract
from pygrapes.serializer.json import Json
from pygrapes.serializer.msgpack import MsgPack
__all__ = ['Abstract', 'Json', 'MsgPack']
| Load resources by absolute path not relative | Load resources by absolute path not relative
| Python | bsd-3-clause | michalbachowski/pygrapes,michalbachowski/pygrapes,michalbachowski/pygrapes |
e3d968c2c477a764e7a5b515f3109dbd83f487f7 | django_channels_jsonrpc/django_channels_jsonrpc/consumer.py | django_channels_jsonrpc/django_channels_jsonrpc/consumer.py | from channels.generic.websockets import WebsocketConsumer, JsonWebsocketConsumer
from .jsonrpcwebsocketconsumer import JsonRpcWebsocketConsumer
class MyJsonRpcWebsocketConsumer(JsonRpcWebsocketConsumer):
# Set to True if you want them, else leave out
strict_ordering = False
slight_ordering = False
def connection_groups(self, **kwargs):
"""
Called to return the list of groups to automatically add/remove
this connection to/from.
"""
return ["test"]
def receive(self, content, **kwargs):
"""
Called when a message is received with decoded JSON content
"""
# Simple echo
print "received: %s" % content
print "kwargs %s" % kwargs
self.send(content)
def disconnect(self, message, **kwargs):
"""
Perform things on connection close
"""
print "disconnect"
@MyJsonRpcWebsocketConsumer.rpc_method()
def ping():
return "pong" | from channels.generic.websockets import WebsocketConsumer, JsonWebsocketConsumer
from .jsonrpcwebsocketconsumer import JsonRpcWebsocketConsumer
class MyJsonRpcWebsocketConsumer(JsonRpcWebsocketConsumer):
# Set to True if you want them, else leave out
strict_ordering = False
slight_ordering = False
def connection_groups(self, **kwargs):
"""
Called to return the list of groups to automatically add/remove
this connection to/from.
"""
return ["test"]
def receive(self, content, **kwargs):
"""
Called when a message is received with decoded JSON content
"""
# Simple echo
print("received: %s" % content)
print("kwargs %s" % kwargs)
self.send(content)
def disconnect(self, message, **kwargs):
"""
Perform things on connection close
"""
print("disconnect")
@MyJsonRpcWebsocketConsumer.rpc_method()
def ping():
return "pong" | Print statements updated to be compatible with Python 3. | Print statements updated to be compatible with Python 3.
| Python | mit | millerf/django-channels-jsonrpc |
6f24da8cdcd0ad22cad0cee4e9864de6e695ab6d | packages/Python/lldbsuite/test/functionalities/command_script/import/thepackage/__init__.py | packages/Python/lldbsuite/test/functionalities/command_script/import/thepackage/__init__.py | import TPunitA
import TPunitB
def __lldb_init_module(debugger,*args):
debugger.HandleCommand("command script add -f thepackage.TPunitA.command TPcommandA")
debugger.HandleCommand("command script add -f thepackage.TPunitB.command TPcommandB")
| from __future__ import absolute_import
from . import TPunitA
from . import TPunitB
def __lldb_init_module(debugger,*args):
debugger.HandleCommand("command script add -f thepackage.TPunitA.command TPcommandA")
debugger.HandleCommand("command script add -f thepackage.TPunitB.command TPcommandB")
| Fix TestImport.py to work with Python 3.5. | Fix TestImport.py to work with Python 3.5.
Differential Revision: http://reviews.llvm.org/D16431
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@258448 91177308-0d34-0410-b5e6-96231b3b80d8
| Python | apache-2.0 | apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb |
f203602b5c5e901f54895c1872becf0b48438628 | src/pluggy/_result.py | src/pluggy/_result.py | """
Hook wrapper "result" utilities.
"""
import sys
def _raise_wrapfail(wrap_controller, msg):
co = wrap_controller.gi_code
raise RuntimeError(
"wrap_controller at %r %s:%d %s"
% (co.co_name, co.co_filename, co.co_firstlineno, msg)
)
class HookCallError(Exception):
""" Hook was called wrongly. """
class _Result(object):
def __init__(self, result, excinfo):
self._result = result
self._excinfo = excinfo
@property
def excinfo(self):
return self._excinfo
@classmethod
def from_call(cls, func):
__tracebackhide__ = True
result = excinfo = None
try:
result = func()
except BaseException:
excinfo = sys.exc_info()
return cls(result, excinfo)
def force_result(self, result):
"""Force the result(s) to ``result``.
If the hook was marked as a ``firstresult`` a single value should
be set otherwise set a (modified) list of results. Any exceptions
found during invocation will be deleted.
"""
self._result = result
self._excinfo = None
def get_result(self):
"""Get the result(s) for this hook call.
If the hook was marked as a ``firstresult`` only a single value
will be returned otherwise a list of results.
"""
__tracebackhide__ = True
if self._excinfo is None:
return self._result
else:
ex = self._excinfo
raise ex[1].with_traceback(ex[2])
| """
Hook wrapper "result" utilities.
"""
import sys
def _raise_wrapfail(wrap_controller, msg):
co = wrap_controller.gi_code
raise RuntimeError(
"wrap_controller at %r %s:%d %s"
% (co.co_name, co.co_filename, co.co_firstlineno, msg)
)
class HookCallError(Exception):
""" Hook was called wrongly. """
class _Result:
def __init__(self, result, excinfo):
self._result = result
self._excinfo = excinfo
@property
def excinfo(self):
return self._excinfo
@classmethod
def from_call(cls, func):
__tracebackhide__ = True
result = excinfo = None
try:
result = func()
except BaseException:
excinfo = sys.exc_info()
return cls(result, excinfo)
def force_result(self, result):
"""Force the result(s) to ``result``.
If the hook was marked as a ``firstresult`` a single value should
be set otherwise set a (modified) list of results. Any exceptions
found during invocation will be deleted.
"""
self._result = result
self._excinfo = None
def get_result(self):
"""Get the result(s) for this hook call.
If the hook was marked as a ``firstresult`` only a single value
will be returned otherwise a list of results.
"""
__tracebackhide__ = True
if self._excinfo is None:
return self._result
else:
ex = self._excinfo
raise ex[1].with_traceback(ex[2])
| Remove explicit inheritance from object | Remove explicit inheritance from object
| Python | mit | pytest-dev/pluggy,RonnyPfannschmidt/pluggy,hpk42/pluggy,pytest-dev/pluggy,RonnyPfannschmidt/pluggy |
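A behavior check against the class above: from_call captures an exception into excinfo, get_result re-raises it with the original traceback, and force_result clears it:

res = _Result.from_call(lambda: 1 / 0)
assert res.excinfo is not None
try:
    res.get_result()
except ZeroDivisionError:
    pass                    # re-raised with the stored traceback
res.force_result(42)
assert res.get_result() == 42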
caa72e132d2d25fd1e67bfeee88c7498e7e52f21 | analysis/count-markers.py | analysis/count-markers.py | #!/usr/bin/env python
import climate
import collections
import joblib
import database
def count(trial):
trial.load()
trial.reindex(100)
trial.mask_dropouts()
markers = []
for m in trial.marker_columns:
s = trial[m + '-c']
if s.count() > 0.01 * len(s):
markers.append(m)
return markers
def main(root):
trials = database.Experiment(root).trials_matching('*')
counts = collections.defaultdict(int)
f = joblib.delayed(count)
for markers in joblib.Parallel(-1)(f(t) for t in trials):
for m in markers:
counts[m] += 1
for m, c in sorted(counts.items(), key=lambda x: -x[1]):
print(c, m)
if __name__ == '__main__':
climate.call(main)
| #!/usr/bin/env python
import climate
import collections
import joblib
import database
def count(trial):
trial.load()
trial.reindex(100)
trial.mask_dropouts()
markers = []
for m in trial.marker_columns:
s = trial.df[m + '-c']
if s.count() > 0.1 * len(s):
markers.append(m)
return markers
def main(root):
trials = database.Experiment(root).trials_matching('*')
counts = collections.defaultdict(int)
f = joblib.delayed(count)
for markers in joblib.Parallel(-1)(f(t) for t in trials):
for m in markers:
counts[m] += 1
for m, c in sorted(counts.items(), key=lambda x: -x[1]):
print(c, m)
if __name__ == '__main__':
climate.call(main)
| Increase fraction of non-dropout frames required. | Increase fraction of non-dropout frames required.
| Python | mit | lmjohns3/cube-experiment,lmjohns3/cube-experiment,lmjohns3/cube-experiment |
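The joblib pattern used above, reduced to a self-contained example; Parallel's first positional argument is n_jobs, so Parallel(-1) means "use all cores":

import joblib

def square(x):
    return x * x

results = joblib.Parallel(n_jobs=-1)(
    joblib.delayed(square)(i) for i in range(4))
assert results == [0, 1, 4, 9]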
ff37a13d1adec1fe685bd48964ab50ef000f53f5 | loom/config.py | loom/config.py | from fabric.api import env, run, sudo, settings, hide
# Default system user
env.user = 'ubuntu'
# Default puppet environment
env.environment = 'prod'
# Default puppet module directory
env.puppet_module_dir = 'modules/'
# Default puppet version
# If loom_puppet_version is None, loom installs the latest version
env.loom_puppet_version = '3.1.1'
# Default librarian version
# If loom_librarian_version is None, loom installs the latest version
env.loom_librarian_version = '0.9.9'
def host_roles(host_string):
"""
Returns the role of a given host string.
"""
roles = set()
for role, hosts in env.roledefs.items():
if host_string in hosts:
roles.add(role)
return list(roles)
def current_roles():
return host_roles(env.host_string)
def has_puppet_installed():
with settings(hide('warnings', 'running', 'stdout', 'stderr'), warn_only=True):
result = sudo('which puppet')
return result.succeeded
def has_librarian_installed():
with settings(hide('warnings', 'running', 'stdout', 'stderr'), warn_only=True):
librarian = sudo('which librarian-puppet')
return librarian.succeeded
| from fabric.api import env, run, settings, hide
# Default system user
env.user = 'ubuntu'
# Default puppet environment
env.environment = 'prod'
# Default puppet module directory
env.puppet_module_dir = 'modules/'
# Default puppet version
# If loom_puppet_version is None, loom installs the latest version
env.loom_puppet_version = '3.1.1'
# Default librarian version
# If loom_librarian_version is None, loom installs the latest version
env.loom_librarian_version = '0.9.9'
def host_roles(host_string):
"""
Returns the role of a given host string.
"""
roles = set()
for role, hosts in env.roledefs.items():
if host_string in hosts:
roles.add(role)
return list(roles)
def current_roles():
return host_roles(env.host_string)
def has_puppet_installed():
with settings(hide('warnings', 'running', 'stdout', 'stderr'), warn_only=True):
result = run('which puppet')
return result.succeeded
def has_librarian_installed():
with settings(hide('warnings', 'running', 'stdout', 'stderr'), warn_only=True):
librarian = run('which librarian-puppet')
return librarian.succeeded
| Revert "sudo is required to run which <gem-exec> on arch." | Revert "sudo is required to run which <gem-exec> on arch."
This reverts commit 15162c58c27bc84f1c7fc0326f782bd693ca4d7e.
| Python | bsd-3-clause | nithinphilips/loom,nithinphilips/loom |
22a678488d43f4ca7fc53c7894113b7895893e2a | mpltools/style/__init__.py | mpltools/style/__init__.py | """
This module defines styles that redefine matplotlib rc parameters. In addition, you
can override pre-defined styles with "mplstyle" files in the current
directory and your home directory. The priority of style files is:
1. ./mplstyle
2. ~/.mplstyle
3. mpltools/style/
Style names should be specified as sections in "mplstyle" files. A simple
"mplstyle" file would look like:
[style1]
text.fontsize = 12
figure.dpi = 150
[style2]
text.fontsize = 10
font.family = 'serif'
Note that we use ConfigObj for parsing rc files so, unlike Matplotlib,
key/value pairs are separated by an equals sign and strings must be quoted.
Functions
=========
use
Redefine rc parameters using specified style.
reset
Reset rc parameters to matplotlib defaults.
lib
Style library.
baselib
Style library defined by mpltools (i.e. before user definitions).
"""
from core import *
| """
This module defines styles that redefine matplotlib rc parameters. In addition, you
can override pre-defined styles with "mplstyle" files in the current
directory and your home directory. The priority of style files is:
1. ./mplstyle
2. ~/.mplstyle
3. mpltools/style/
Style names should be specified as sections in "mplstyle" files. A simple
"mplstyle" file would look like:
[style1]
text.fontsize = 12
figure.dpi = 150
[style2]
text.fontsize = 10
font.family = 'serif'
Note that we use ConfigObj for parsing rc files so, unlike Matplotlib,
key/value pairs are separated by an equals sign and strings must be quoted.
Functions
=========
use
Redefine rc parameters using specified style.
lib
Style library.
baselib
Style library defined by mpltools (i.e. before user definitions).
"""
from core import *
| Remove outdated function from list. | FIX: Remove outdated function from list.
| Python | bsd-3-clause | tonysyu/mpltools,matteoicardi/mpltools |
137f5542aff91d259e68684c79d41cc47648cee2 | mrburns/settings/server.py | mrburns/settings/server.py | import os
import socket
from .base import * # noqa
SERVER_ENV = os.getenv('DJANGO_SERVER_ENV')
SECRET_KEY = os.getenv('SECRET_KEY')
DEBUG = TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
'webwewant.mozilla.org',
'webwewant.allizom.org',
# the server's IP (for monitors)
socket.gethostbyname(socket.gethostname()),
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:1',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
},
'smithers': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:0',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
}
}
DJANGO_REDIS_IGNORE_EXCEPTIONS = False
ENABLE_REDIS = True
| import os
import socket
from .base import * # noqa
SERVER_ENV = os.getenv('DJANGO_SERVER_ENV')
SECRET_KEY = os.getenv('SECRET_KEY')
STATIC_URL = os.getenv('STATIC_URL', STATIC_URL)
DEBUG = TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
'webwewant.mozilla.org',
'webwewant.allizom.org',
# the server's IP (for monitors)
socket.gethostbyname(socket.gethostname()),
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:1',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
},
'smithers': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:0',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
}
}
DJANGO_REDIS_IGNORE_EXCEPTIONS = False
ENABLE_REDIS = True
| Set STATIC_URL from an env var if available. | Set STATIC_URL from an env var if available.
| Python | mpl-2.0 | almossawi/mrburns,mozilla/mrburns,almossawi/mrburns,mozilla/mrburns,almossawi/mrburns,almossawi/mrburns,mozilla/mrburns |
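The override pattern in isolation: os.getenv's second argument is the fallback, so the value imported from base settings survives when the variable is unset. The values below are stand-ins:

import os

STATIC_URL = '/static/'                           # value from base settings
STATIC_URL = os.getenv('STATIC_URL', STATIC_URL)  # env wins only if set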
9f922f939ec19d0d9a9a91abb3e8b0d5b010c246 | djangoautoconf/management/commands/dump_settings.py | djangoautoconf/management/commands/dump_settings.py | import os
from django.core.management import BaseCommand
from django.conf import settings
def dump_attrs(obj_instance):
for attr in dir(obj_instance):
if attr != attr.upper():
continue
yield attr, getattr(obj_instance, attr)
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
try:
os.remove("local/total_settings.py")
except:
pass
with open("local/total_settings.py", "w") as f:
for key, value in dump_attrs(settings):
if type(value) in (list, tuple, dict):
print >>f, key, "=", value
elif type(value) in (str, ):
print >>f, key, "=", '"'+str(value)+'"'
else:
print >>f, key, "=", str(value)
| import os
from django.core.management import BaseCommand
from django.conf import settings
def dump_attrs(obj_instance):
for attr in dir(obj_instance):
if attr != attr.upper():
continue
yield attr, getattr(obj_instance, attr)
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
try:
os.remove("local/total_settings.py")
except:
pass
with open("local/total_settings.py", "w") as f:
for key, value in dump_attrs(settings):
if type(value) in (list, tuple, dict):
print >>f, key, "=", value
elif type(value) in (str, ):
print >>f, key, "=", '"'+str(value)+'"'
else:
print >>f, key, "=", '"'+str(value)+'"'
| Workaround for dump settings issue. | Workaround for dump settings issue.
| Python | bsd-3-clause | weijia/djangoautoconf,weijia/djangoautoconf |
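A common alternative to per-type branching is %r / repr(), which yields a valid Python literal for strings, numbers, lists and dicts alike; this is a sketch against the helpers above, not the project's code:

with open("local/total_settings.py", "w") as f:
    for key, value in dump_attrs(settings):
        f.write("%s = %r\n" % (key, value))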
4b88dff3df0c82392314efe9c48379e1ad2b1500 | vinotes/apps/api/serializers.py | vinotes/apps/api/serializers.py | from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Note, Trait, Wine, Winery
class WinerySerializer(serializers.ModelSerializer):
wines = serializers.PrimaryKeyRelatedField(many=True, queryset=Wine.objects.all())
class Meta:
model = Winery
fields = ('id', 'name', 'wines')
class WineSerializer(serializers.ModelSerializer):
class Meta:
model = Wine
fields = ('id', 'winery', 'name', 'vintage')
class TraitSerializer(serializers.ModelSerializer):
class Meta:
model = Trait
fields = ('id', 'name')
class NoteSerializer(serializers.ModelSerializer):
taster = serializers.ReadOnlyField(source='taster.username')
class Meta:
model = Note
fields = ('id', 'taster', 'tasted', 'wine', 'color_traits',
'nose_traits', 'taste_traits', 'finish_traits', 'rating')
class UserSerializer(serializers.ModelSerializer):
notes = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
class Meta:
model = User
fields = ('id', 'username', 'email', 'notes') | from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Note, Trait, Wine, Winery
class WinerySerializer(serializers.ModelSerializer):
wines = serializers.PrimaryKeyRelatedField(many=True, queryset=Wine.objects.all())
class Meta:
model = Winery
fields = ('id', 'name', 'wines')
class WineSerializer(serializers.ModelSerializer):
class Meta:
model = Wine
fields = ('id', 'winery', 'name', 'vintage')
class TraitSerializer(serializers.ModelSerializer):
color_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
nose_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
taste_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
finish_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
class Meta:
model = Trait
fields = ('id', 'name', 'color_traits',
'nose_traits', 'taste_traits', 'finish_traits')
class NoteSerializer(serializers.ModelSerializer):
taster = serializers.ReadOnlyField(source='taster.username')
class Meta:
model = Note
fields = ('id', 'taster', 'tasted', 'wine', 'color_traits',
'nose_traits', 'taste_traits', 'finish_traits', 'rating')
class UserSerializer(serializers.ModelSerializer):
notes = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
class Meta:
model = User
fields = ('id', 'username', 'email', 'notes') | Add trait's wines to serializer. | Add trait's wines to serializer.
| Python | unlicense | rcutmore/vinotes-api,rcutmore/vinotes-api |
85a869d29275f4ec8d03a53c0c4a6e82f36b0213 | stackdio/stackdio/settings/local.py | stackdio/stackdio/settings/local.py | # Grab the base settings
from .base import *
# Override at will!
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
##
#
##
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'stackdio',
'HOST': 'localhost',
'PORT': '3306',
'USER': getenv('MYSQL_USER'),
'PASSWORD': getenv('MYSQL_PASS'),
}
}
##
# Celery & RabbitMQ
##
BROKER_URL = 'amqp://guest:guest@localhost:5672/'
##
# Add in additional middleware
##
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
##
# Add in additional applications
##
INSTALLED_APPS += ('debug_toolbar',)
##
# For debug_toolbar to load
##
INTERNAL_IPS = ('127.0.0.1',)
##
# The local storage directory for storing file data
##
FILE_STORAGE_DIRECTORY = normpath(join(SITE_ROOT, 'storage'))
| # Grab the base settings
from .base import *
# Override at will!
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
##
#
##
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'stackdio',
'HOST': 'localhost',
'PORT': '3306',
'USER': getenv('MYSQL_USER'),
'PASSWORD': getenv('MYSQL_PASS'),
}
}
##
# Celery & RabbitMQ
##
BROKER_URL = 'amqp://guest:guest@localhost:5672/'
##
# Add in additional middleware
##
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
##
# Add in additional applications
##
INSTALLED_APPS += ('debug_toolbar',)
##
# For debug_toolbar to load
##
INTERNAL_IPS = ('127.0.0.1',)
##
# debug_toolbar configuration
##
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
}
##
# The local storage directory for storing file data
##
FILE_STORAGE_DIRECTORY = normpath(join(SITE_ROOT, 'storage'))
| Disable debug_toolbar from catching http redirects...pretty annoying | PI-27: Disable debug_toolbar from catching http redirects...pretty annoying
| Python | apache-2.0 | clarkperkins/stackdio,clarkperkins/stackdio,stackdio/stackdio,clarkperkins/stackdio,stackdio/stackdio,stackdio/stackdio,stackdio/stackdio,clarkperkins/stackdio |
a34c594a13a79a864d1b747d84a0074e7711dd42 | testanalyzer/pythonanalyzer.py | testanalyzer/pythonanalyzer.py | import re
from fileanalyzer import FileAnalyzer
class PythonAnalyzer(FileAnalyzer):
def get_class_count(self, content):
return len(
re.findall("class [a-zA-Z0-9_]+\(?[a-zA-Z0-9_, ]*\)?:", content))
def get_function_count(self, content):
return len(
re.findall("def [a-zA-Z0-9_]+\([a-zA-Z0-9_, ]*\):", content))
| import re
from fileanalyzer import FileAnalyzer
class PythonAnalyzer(FileAnalyzer):
def get_class_count(self, content):
return len(
re.findall("class +[a-zA-Z0-9_]+ *\(?[a-zA-Z0-9_, ]*\)? *:", content))
def get_function_count(self, content):
return len(
re.findall("def +[a-zA-Z0-9_]+ *\([a-zA-Z0-9_, ]*\) *:", content))
| Update regex to allow spaces | Update regex to allow spaces
| Python | mpl-2.0 | CheriPai/TestAnalyzer,CheriPai/TestAnalyzer,CheriPai/TestAnalyzer |
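A quick check of what the loosened pattern accepts; the sample strings are illustrative, not from the analyzer's test suite:

import re

class_re = r"class +[a-zA-Z0-9_]+ *\(?[a-zA-Z0-9_, ]*\)? *:"
samples = ["class Foo:", "class  Foo :", "class Foo (Base) :"]
assert all(re.search(class_re, s) for s in samples)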
e3835baeb03da43456442fdd2678891cf2b6f957 | DeployUtil/authentication.py | DeployUtil/authentication.py | import urllib.request
import ssl
import http.cookiejar
#TODO: give an indicator of success
#TODO: handle errors a bit better.
def do_pair(ip, pin, **_args):
# IF YOU DON'T DO THIS OVER HTTPS YOU WILL GET 308s to goto HTTPS
scheme = 'https://'
port = ''
api = '/api/authorize/pair?pin={pin}&persistent=0'
verb = 'POST'
request_url = scheme + ip + port + api.format_map({'pin':pin})
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_NONE
https_handler = urllib.request.HTTPSHandler(context=context)
request = urllib.request.Request(url=request_url, method=verb)
cookies = urllib.request.HTTPCookieProcessor(http.cookiejar.MozillaCookieJar("deployUtil.cookies"))
opener = urllib.request.build_opener(WDPRedirectHandler(), https_handler, cookies)
resp = opener.open(request)
cookies.cookiejar.save(ignore_discard=True)
| import urllib.request
import ssl
import http.cookiejar
#TODO: give an indicator of success
#TODO: handle errors a bit better.
def do_pair(ip, pin, **_args):
# IF YOU DON'T DO THIS OVER HTTPS YOU WILL GET 308s to goto HTTPS
scheme = 'https://'
port = ''
api = '/api/authorize/pair?pin={pin}&persistent=0'
verb = 'POST'
request_url = scheme + ip + port + api.format_map({'pin':pin})
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_NONE
https_handler = urllib.request.HTTPSHandler(context=context)
request = urllib.request.Request(url=request_url, method=verb)
cookies = urllib.request.HTTPCookieProcessor(http.cookiejar.MozillaCookieJar("deployUtil.cookies"))
opener = urllib.request.build_opener(https_handler, cookies)
resp = opener.open(request)
cookies.cookiejar.save(ignore_discard=True)
 | Fix a bug in cleanup | Fix a bug in cleanup: drop the undefined WDPRedirectHandler from the opener chain, which raised NameError as soon as do_pair ran
| Python | mit | loarabia/DeployUtil |
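For context, urllib.request.build_opener simply chains whatever handler instances it receives, and WDPRedirectHandler was never defined or imported in this module, so the removed call could not succeed. A minimal sketch of the surviving handler chain (illustrative, not the record's code):

import ssl
import http.cookiejar
import urllib.request

# PROTOCOL_SSLv23 is the legacy spelling; modern Pythons prefer PROTOCOL_TLS.
ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ctx.verify_mode = ssl.CERT_NONE
cookies = urllib.request.HTTPCookieProcessor(
    http.cookiejar.MozillaCookieJar("deployUtil.cookies"))
opener = urllib.request.build_opener(
    urllib.request.HTTPSHandler(context=ctx), cookies)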
3d8f642460cf5c26dd8f58a5a36786b3ef4069e8 | ogusa/tests/test_txfunc.py | ogusa/tests/test_txfunc.py | import pickle
from ogusa import txfunc
def test_cps_data():
with open("../../regression/cps_test_replace_outliers.pkl", 'rb') as p:
param_arr = pickle.load(p)
sse_big_mat = pickle.load(p)
txfunc.replace_outliers(param_arr, sse_big_mat)
| from ogusa import txfunc
import numpy as np
import pickle
import os
CUR_PATH = os.path.abspath(os.path.dirname(__file__))
def test_replace_outliers():
"""
4 cases:
s is an outlier and is 0
s is an outlier and is in the interior (s > 0 and s < S)
s is not an outlier but the first s - 1 ages were (s = 1 in our case)
s is an outlier and is the max age
"""
S = 20
BW = 2
numparams = 5
param_arr = np.random.rand(S * BW * numparams).reshape(S, BW, numparams)
sse_big_mat = ~ np.ones((S, BW), dtype=bool)
sse_big_mat[0, 0] = True
sse_big_mat[1, 0] = True
sse_big_mat[S-11, 0] = True
sse_big_mat[S-10, 0] = True
sse_big_mat[S - 2, 0] = True
sse_big_mat[S - 1, 0] = True
txfunc.replace_outliers(param_arr, sse_big_mat)
| Use simulated data for test | Use simulated data for test
| Python | mit | OpenSourcePolicyCenter/dynamic,OpenSourcePolicyCenter/dynamic,OpenSourcePolicyCenter/dynamic,OpenSourcePolicyCenter/dynamic,OpenSourcePolicyCenter/dynamic |
dfa2ef8092af45b01ee504b308fc10623552f406 | devicehive/transports/base_transport.py | devicehive/transports/base_transport.py | import uuid
class BaseTransport(object):
"""Base transport class."""
REQUEST_ID_KEY = 'requestId'
REQUEST_ACTION_KEY = 'action'
def __init__(self, data_format_class, data_format_options, handler_class,
handler_options, name):
self._data_format = data_format_class(**data_format_options)
self._data_type = self._data_format.data_type
self._handler = handler_class(self, **handler_options)
self._connected = False
self._name = name
@staticmethod
def _uuid():
return str(uuid.uuid1())
def _assert_not_connected(self):
assert not self._connected, 'transport connection already created'
def _assert_connected(self):
assert self._connected, 'transport connection has not created'
def _encode(self, obj):
return self._data_format.encode(obj)
def _decode(self, data):
return self._data_format.decode(data)
def _call_handler_method(self, name, *args):
getattr(self._handler, name)(*args)
def name(self):
return self._name
def is_connected(self):
return self._connected
def connect(self, url, **options):
raise NotImplementedError
def send_request(self, action, request, **params):
raise NotImplementedError
def request(self, action, request, **params):
raise NotImplementedError
def close(self):
raise NotImplementedError
def join(self, timeout=None):
raise NotImplementedError
class BaseTransportException(Exception):
"""Base transport exception."""
pass
| import uuid
class BaseTransport(object):
"""Base transport class."""
REQUEST_ID_KEY = 'requestId'
REQUEST_ACTION_KEY = 'action'
def __init__(self, data_format_class, data_format_options, handler_class,
handler_options, name):
self._data_format = data_format_class(**data_format_options)
self._data_type = self._data_format.data_type
self._handler = handler_class(self, **handler_options)
self._connected = False
self._name = name
@staticmethod
def _uuid():
return str(uuid.uuid1())
def _assert_not_connected(self):
assert not self._connected, 'transport connection already created'
def _assert_connected(self):
assert self._connected, 'transport connection has not created'
def _encode(self, obj):
return self._data_format.encode(obj)
def _decode(self, data):
return self._data_format.decode(data)
def _call_handler_method(self, name, *args):
getattr(self._handler, name)(*args)
def name(self):
return self._name
def connected(self):
return self._connected
def connect(self, url, **options):
raise NotImplementedError
def send_request(self, action, request, **params):
raise NotImplementedError
def request(self, action, request, **params):
raise NotImplementedError
def close(self):
raise NotImplementedError
def join(self, timeout=None):
raise NotImplementedError
class BaseTransportException(Exception):
"""Base transport exception."""
pass
| Rename is_connected method to connected | Rename is_connected method to connected
| Python | apache-2.0 | devicehive/devicehive-python |
ddbd66713fd8f146509413772f4a4e3801f5fbf8 | ynr/apps/sopn_parsing/models.py | ynr/apps/sopn_parsing/models.py | import json
from django.db import models
from model_utils.models import TimeStampedModel
class ParsedSOPN(TimeStampedModel):
"""
A model for storing the parsed data out of a PDF
"""
sopn = models.OneToOneField(
"official_documents.OfficialDocument", on_delete=models.CASCADE
)
raw_data = models.TextField()
raw_data_type = models.CharField(max_length=255, default="pandas")
parsed_data = models.TextField(null=True)
status = models.CharField(max_length=255, default="unparsed")
@property
def as_pandas(self):
import pandas
pandas.set_option("display.max_colwidth", -1)
return pandas.DataFrame.from_dict(json.loads(self.raw_data))
@property
def data_as_html(self):
if self.raw_data_type == "pandas":
data = self.as_pandas
header = data.iloc[0]
data = data[1:]
data.columns = header
return data.to_html(index=False, escape=False).replace(
"\\n", "<br>"
)
| import json
from django.db import models
from model_utils.models import TimeStampedModel
class ParsedSOPN(TimeStampedModel):
"""
A model for storing the parsed data out of a PDF
"""
sopn = models.OneToOneField(
"official_documents.OfficialDocument", on_delete=models.CASCADE
)
raw_data = models.TextField()
raw_data_type = models.CharField(max_length=255, default="pandas")
parsed_data = models.TextField(null=True)
status = models.CharField(max_length=255, default="unparsed")
@property
def as_pandas(self):
import pandas
pandas.set_option("display.max_colwidth", None)
return pandas.DataFrame.from_dict(json.loads(self.raw_data))
@property
def data_as_html(self):
if self.raw_data_type == "pandas":
data = self.as_pandas
header = data.iloc[0]
data = data[1:]
data.columns = header
return data.to_html(index=False, escape=False).replace(
"\\n", "<br>"
)
| Use None rather than -1 for Pandas | Use None rather than -1 for Pandas
| Python | agpl-3.0 | DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative |
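Background for this change: pandas deprecated -1 as the "unlimited" sentinel for display options around the 1.0 release in favor of None, so the old call emitted a FutureWarning on newer releases (version details from memory; treat as approximate). A standalone sketch:

import pandas as pd

# None disables column-width truncation; -1 is the deprecated spelling.
pd.set_option("display.max_colwidth", None)
df = pd.DataFrame({"text": ["a fairly long string " * 10]})
print(df.to_string())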
f9787e6117ffcc1ff9571267f29c7993fbe08f20 | numpy/doc/pyrex/setup.py | numpy/doc/pyrex/setup.py | #!/usr/bin/env python
"""Install file for example on how to use Pyrex with Numpy.
For more details, see:
http://www.scipy.org/Cookbook/Pyrex_and_NumPy
http://www.scipy.org/Cookbook/ArrayStruct_and_Pyrex
"""
from distutils.core import setup
from distutils.extension import Extension
# Make this usable by people who don't have pyrex installed (I've committed
# the generated C sources to SVN).
try:
from Pyrex.Distutils import build_ext
has_pyrex = True
except ImportError:
has_pyrex = False
import numpy
# Define a pyrex-based extension module, using the generated sources if pyrex
# is not available.
if has_pyrex:
pyx_sources = ['numpyx.pyx']
cmdclass = {'build_ext': build_ext}
else:
pyx_sources = ['numpyx.c']
cmdclass = {}
pyx_ext = Extension('numpyx',
pyx_sources,
include_dirs = [numpy.get_numpy_include()])
# Call the routine which does the real work
setup(name = 'numpyx',
description = 'Small example on using Pyrex to write a Numpy extension',
url = 'http://www.scipy.org/Cookbook/Pyrex_and_NumPy',
ext_modules = [pyx_ext],
cmdclass = cmdclass,
)
| #!/usr/bin/env python
"""Install file for example on how to use Pyrex with Numpy.
For more details, see:
http://www.scipy.org/Cookbook/Pyrex_and_NumPy
http://www.scipy.org/Cookbook/ArrayStruct_and_Pyrex
"""
from distutils.core import setup
from distutils.extension import Extension
# Make this usable by people who don't have pyrex installed (I've committed
# the generated C sources to SVN).
try:
from Pyrex.Distutils import build_ext
has_pyrex = True
except ImportError:
has_pyrex = False
import numpy
# Define a pyrex-based extension module, using the generated sources if pyrex
# is not available.
if has_pyrex:
pyx_sources = ['numpyx.pyx']
cmdclass = {'build_ext': build_ext}
else:
pyx_sources = ['numpyx.c']
cmdclass = {}
pyx_ext = Extension('numpyx',
pyx_sources,
include_dirs = [numpy.get_include()])
# Call the routine which does the real work
setup(name = 'numpyx',
description = 'Small example on using Pyrex to write a Numpy extension',
url = 'http://www.scipy.org/Cookbook/Pyrex_and_NumPy',
ext_modules = [pyx_ext],
cmdclass = cmdclass,
)
| Use get_include instead of get_numpy_include. | Use get_include instead of get_numpy_include.
git-svn-id: 77a43f9646713b91fea7788fad5dfbf67e151ece@3525 94b884b6-d6fd-0310-90d3-974f1d3f35e1
| Python | bsd-3-clause | illume/numpy3k,jasonmccampbell/numpy-refactor-sprint,chadnetzer/numpy-gaurdro,jasonmccampbell/numpy-refactor-sprint,illume/numpy3k,chadnetzer/numpy-gaurdro,teoliphant/numpy-refactor,Ademan/NumPy-GSoC,jasonmccampbell/numpy-refactor-sprint,chadnetzer/numpy-gaurdro,efiring/numpy-work,efiring/numpy-work,jasonmccampbell/numpy-refactor-sprint,Ademan/NumPy-GSoC,teoliphant/numpy-refactor,efiring/numpy-work,teoliphant/numpy-refactor,illume/numpy3k,Ademan/NumPy-GSoC,teoliphant/numpy-refactor,illume/numpy3k,efiring/numpy-work,teoliphant/numpy-refactor,chadnetzer/numpy-gaurdro,Ademan/NumPy-GSoC |
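For reference, numpy.get_include() is the long-standing replacement for the deprecated get_numpy_include(); it returns the directory holding NumPy's C headers. A minimal usage sketch in the modern setuptools spelling (an assumption, not the record's distutils code):

from setuptools import setup, Extension
import numpy

ext = Extension(
    "example",
    sources=["example.c"],
    include_dirs=[numpy.get_include()],  # path to numpy/arrayobject.h and friends
)
setup(name="example", ext_modules=[ext])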
c022774b37130a1c1744aabecb480279802bdeba | linecounter.py | linecounter.py | #!/usr/bin/env python
from flask import Flask
from flask_restplus import Resource, Api, reqparse
from werkzeug.datastructures import FileStorage
from time import sleep
from random import randint
import socket
app = Flask(__name__)
api = Api(app, version='1.0', title='LineCounter', description='This REST service exposes an endpoint that counts the lines of a given text file')
upload_parser = api.parser()
upload_parser.add_argument('text', location='files', type=FileStorage)
requests_list = []
@api.route('/lines')
@api.expect(upload_parser)
class LineCounter(Resource):
def post(self):
requests_list.append(1)
args = upload_parser.parse_args()
text = args['text']
sleep(randint(2, 30))
if text:
result = len(text.read().decode('utf-8').split('\n'))
requests_list.pop()
return {'count': result}
else:
requests_list.pop()
return {'count': 0}
@api.route('/info')
class ServiceName(Resource):
def get(self):
return {
'service_name': 'WordCounter',
'hostname': socket.gethostname(),
'nb_requests': len(requests_list)
}
if __name__ == '__main__':
app.run(threaded=True, host='0.0.0.0')
| #!/usr/bin/env python
from flask import Flask
from flask_restplus import Resource, Api, reqparse
from werkzeug.datastructures import FileStorage
from time import sleep
from random import randint
import socket
app = Flask(__name__)
api = Api(app, version='1.0', title='LineCounter', description='This REST service exposes an endpoint that counts the lines of a given text file')
upload_parser = api.parser()
upload_parser.add_argument('text', location='files', type=FileStorage)
requests_list = []
@api.route('/lines')
@api.expect(upload_parser)
class LineCounter(Resource):
def post(self):
requests_list.append(1)
args = upload_parser.parse_args()
text = args['text']
sleep(randint(2, 30))
if text:
result = len(text.read().decode('utf-8').split('\n'))
requests_list.pop()
return {'count': result}
else:
requests_list.pop()
return {'count': 0}
@api.route('/info')
class ServiceName(Resource):
def get(self):
return {
'service_name': 'LineCounter',
'hostname': socket.gethostname(),
'nb_requests': len(requests_list)
}
if __name__ == '__main__':
app.run(threaded=True, host='0.0.0.0')
| Fix the service name for the LineCounter | Fix the service name for the LineCounter
| Python | mit | paraita/simplerestservice |
1b962b12d40a43280de9ab4ed974d69613081bf4 | surok/discovery.py | surok/discovery.py | import dns.resolver
import dns.query
from dns.exception import DNSException
# Resolve service from mesos-dns SRV record
# return dict {"servicename": [{"name": "service.f.q.d.n.", "port": 9999}]}
def resolve(app, conf):
hosts = {}
services = app['services']
domain = conf['domain']
group = None
if app['env'].get('SUROK_DISCOVERY_GROUP') is not None:
group = app['env']['SUROK_DISCOVERY_GROUP']
for service in services:
hosts[service['name']] = []
# Check group configuration
if group is not None:
pass
else:
# Load group from service config
# /etc/surok/conf.d/service_conf.json
group = service['group']
fqdn = '_' + service['name'] + '.' + group + '._tcp.' + domain
hosts[service['name']] = do_query(fqdn)
return hosts
# Do SRV queries
# Return array: [{"name": "f.q.d.n", "port": 8876}]
def do_query(fqdn):
servers = []
try:
query = dns.resolver.query(fqdn, 'SRV')
query.lifetime = 1.0
for rdata in query:
info = str(rdata).split()
server = {'name': info[3], 'port': info[2]}
servers.append(server)
except DNSException:
print("Could not resolve " + fqdn)
return servers
| import dns.resolver
import dns.query
from dns.exception import DNSException
# Resolve service from mesos-dns SRV record
# return dict {"servicename": [{"name": "service.f.q.d.n.", "port": 9999}]}
def resolve(app, conf):
hosts = {}
services = app['services']
domain = conf['domain']
group = None
if app['env'].get('SUROK_DISCOVERY_GROUP') is not None:
group = app['env']['SUROK_DISCOVERY_GROUP']
for service in services:
hosts[service['name']] = []
# Check group configuration
if group is not None:
pass
else:
# Load group from service config
# /etc/surok/conf.d/service_conf.json
group = service['group']
fqdn = '_' + service['name'] + '.' + group + '._tcp.' + domain
hosts[service['name']] = do_query(fqdn)
return hosts
# Do SRV queries
# Return array: [{"name": "f.q.d.n", "port": 8876}]
def do_query(fqdn):
servers = []
try:
query = dns.resolver.query(fqdn, 'SRV')
query.lifetime = 1.0
for rdata in query:
info = str(rdata).split()
server = {'name': info[3][:-1], 'port': info[2]}
servers.append(server)
except DNSException:
print("Could not resolve " + fqdn)
return servers
 | Remove ending '.' in hostname. (for those fucking libs that know nothing about the RFC) | Remove ending '.' in hostname. (for those fucking libs that know nothing about the RFC)
| Python | bsd-3-clause | Difrex/surok,Difrex/surok |
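The stripped character is the DNS root dot: SRV answers carry fully-qualified targets such as 'service.example.com.', and str(rdata).split()[3] keeps that form. A slightly safer normalization than slicing (a hypothetical variant, not the committed code):

name = "service.example.com."
# rstrip('.') is a no-op when the trailing root dot is absent,
# whereas name[:-1] would chop a real character in that case.
assert name.rstrip('.') == "service.example.com"
assert "service.example.com".rstrip('.') == "service.example.com"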
0b388559f1e130af84d13991a66cf61448d39429 | common/hil_slurm_settings.py | common/hil_slurm_settings.py | """
MassOpenCloud / Hardware Isolation Layer (HIL)
Slurm / HIL Control Settings
May 2017, Tim Donahue tpd001@gmail.com
"""
DEBUG = True
SLURM_INSTALL_DIR = '/usr/bin/'
HIL_SLURMCTLD_PROLOG_LOGFILE = '/var/log/slurm-llnl/hil_prolog.log'
HIL_MONITOR_LOGFILE = '/var/log/slurm-llnl/hil_monitor.log'
HIL_RESERVATIONS_FILE = '/var/local/slurm-llnl/hil_reservations.txt'
USER_HIL_SUBDIR = '.hil'
USER_HIL_LOGFILE = 'hil_reservations.log'
HIL_CMD_NAMES = ('hil_reserve', 'hil_release')
HIL_PARTITION_PREFIX = 'HIL_partition_'
HIL_PARTITION_PREFIX = 'debug'
HIL_RESERVATION_DEFAULT_DURATION = 24 * 60 * 60 # Seconds
HIL_RESERVATION_GRACE_PERIOD = 4 * 60 * 60 # Seconds
HIL_RESERVATION_PREFIX = 'flexalloc_MOC_'
# Partition validation controls
RES_CHECK_DEFAULT_PARTITION = False
RES_CHECK_EXCLUSIVE_PARTITION = False
RES_CHECK_SHARED_PARTITION = False
RES_CHECK_PARTITION_STATE = True
# EOF
| """
MassOpenCloud / Hardware Isolation Layer (HIL)
Slurm / HIL Control Settings
May 2017, Tim Donahue tpd001@gmail.com
"""
DEBUG = True
SLURM_INSTALL_DIR = '/usr/local/bin/'
HIL_SLURMCTLD_PROLOG_LOGFILE = '/var/log/slurm-llnl/hil_prolog.log'
HIL_MONITOR_LOGFILE = '/var/log/slurm-llnl/hil_monitor.log'
HIL_RESERVATIONS_FILE = '/var/local/slurm-llnl/hil_reservations.txt'
USER_HIL_SUBDIR = '.hil'
USER_HIL_LOGFILE = 'hil_reservations.log'
HIL_CMD_NAMES = ('hil_reserve', 'hil_release')
HIL_PARTITION_PREFIX = 'HIL_partition_'
HIL_PARTITION_PREFIX = 'debug'
HIL_RESERVATION_DEFAULT_DURATION = 24 * 60 * 60 # Seconds
HIL_RESERVATION_GRACE_PERIOD = 4 * 60 * 60 # Seconds
HIL_RESERVATION_PREFIX = 'flexalloc_MOC_'
# Partition validation controls
RES_CHECK_DEFAULT_PARTITION = False
RES_CHECK_EXCLUSIVE_PARTITION = False
RES_CHECK_SHARED_PARTITION = False
RES_CHECK_PARTITION_STATE = True
# EOF
| Change install directory - now /usr/local/bin | Change install directory - now /usr/local/bin
| Python | mit | mghpcc-projects/user_level_slurm_reservations,mghpcc-projects/user_level_slurm_reservations |
4fa157dbb0fc7323ca89b3e655469062935f84c1 | Main.py | Main.py | """Main Module of PDF Splitter"""
import argparse
import os
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def width_greater_than_height(page):
box = page.mediaBox
return box.getWidth() > box.getHeight()
if __name__ == '__main__':
args = parser.parse_args()
directory = args.directory
all_pdf_files = [os.path.join(directory, filename) for filename in all_pdf_files_in_directory(directory)]
opened_files = map(lambda path: open(path, 'rb'), all_pdf_files)
all_pages = concat_pdf_pages(opened_files)
for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1):
pdf_writer = PdfFileWriter()
map(pdf_writer.addPage, pages)
output_filename = '{0:05}.pdf'.format(idx)
with open(output_filename, 'wb') as output_file:
pdf_writer.write(output_file)
output_file.flush()
os.fsync(output_file.fileno())
map(lambda f: f.close, opened_files)
| """Main Module of PDF Splitter"""
import argparse
import os
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def main():
args = parser.parse_args()
directory = args.directory
all_pdf_files = [os.path.join(directory, filename) for filename in all_pdf_files_in_directory(directory)]
opened_files = map(lambda path: open(path, 'rb'), all_pdf_files)
all_pages = concat_pdf_pages(opened_files)
for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1):
pdf_writer = PdfFileWriter()
map(pdf_writer.addPage, pages)
output_filename = '{0:05}.pdf'.format(idx)
with open(output_filename, 'wb') as output_file:
pdf_writer.write(output_file)
output_file.flush()
os.fsync(output_file.fileno())
map(lambda f: f.close, opened_files)
def width_greater_than_height(page):
box = page.mediaBox
return box.getWidth() > box.getHeight()
if __name__ == '__main__':
main()
| Refactor main as a separate function | Refactor main as a separate function
| Python | mit | shunghsiyu/pdf-processor |
a4740e9c2bf2e582ab78b8fa1aaf904c72501ee2 | multivid_cl.py | multivid_cl.py | #!/usr/bin/env python
import search
import tmap
if __name__ == "__main__":
from pprint import pprint as pp
import sys
to_dict = lambda r: r.to_dict()
h = search.HuluSearch()
a = search.AmazonSearch()
n = search.NetflixSearch()
# get the query from the first argument or from user input
if len(sys.argv) > 1:
query = sys.argv[1]
else:
query = raw_input("search: ")
# get a shorter query to use for autocomplete
ac_query = query[0:3]
ac_results = tmap.map(lambda s: s.autocomplete(ac_query), (a, h, n),
num_threads=3)
autocomplete_results = {
"amazon": ac_results[0],
"hulu": ac_results[1],
"netflix": ac_results[2],
}
print "autocomplete results for '" + ac_query + "':"
pp(autocomplete_results)
print
results = tmap.map(lambda s: s.find(query), (a, h, n), num_threads=3)
search_results = {
"amazon": map(to_dict, results[0]),
"hulu": map(to_dict, results[1]),
"netflix": map(to_dict, results[2])
}
print "search results for '" + query + "':"
pp(search_results)
print
| #!/usr/bin/env python
import search
import tmap
if __name__ == "__main__":
from pprint import pprint as pp
import sys
to_dict = lambda r: r.to_dict()
h = search.HuluSearch()
a = search.AmazonSearch()
n = search.NetflixSearch()
# get the query from the first argument or from user input
if len(sys.argv) > 1:
query = sys.argv[1]
if query.strip() == "":
raise ValueError("Non-blank query string is required!")
else:
query = ""
# get a non-blank query string
while query.strip() == "":
query = raw_input("search: ")
# get a shorter query to use for autocomplete
ac_query = query[0:3]
ac_results = tmap.map(lambda s: s.autocomplete(ac_query), (a, h, n),
num_threads=3)
autocomplete_results = {
"amazon": ac_results[0],
"hulu": ac_results[1],
"netflix": ac_results[2],
}
print "autocomplete results for '" + ac_query + "':"
pp(autocomplete_results)
print
results = tmap.map(lambda s: s.find(query), (a, h, n), num_threads=3)
search_results = {
"amazon": map(to_dict, results[0]),
"hulu": map(to_dict, results[1]),
"netflix": map(to_dict, results[2])
}
print "search results for '" + query + "':"
pp(search_results)
print
| Change CL to require a non-blank query | Change CL to require a non-blank query
| Python | mit | jasontbradshaw/multivid,jasontbradshaw/multivid |
f5798bd346f939a4e098e09dd44fb08bc04ff1f5 | ircstat/log.py | ircstat/log.py | # Copyright 2013 John Reese
# Licensed under the MIT license
import logging
logging.addLevelName(logging.DEBUG, 'DBG')
logging.addLevelName(logging.INFO, 'INF')
logging.addLevelName(logging.WARNING, 'WRN')
logging.addLevelName(logging.ERROR, 'ERR')
def logger(name=None):
log = logging.getLogger(name)
log.setLevel(logging.DEBUG)
fm = logging.Formatter('%(levelname)s %(message)s')
sh = logging.StreamHandler()
sh.setLevel(logging.DEBUG)
sh.setFormatter(fm)
log.addHandler(sh)
return log
| # Copyright 2013 John Reese
# Licensed under the MIT license
import logging
logging.addLevelName(logging.DEBUG, 'DBG')
logging.addLevelName(logging.INFO, 'INF')
logging.addLevelName(logging.WARNING, 'WRN')
logging.addLevelName(logging.ERROR, 'ERR')
sh = None
def logger(name=None):
global sh
log = logging.getLogger(name)
log.setLevel(logging.DEBUG)
if sh is None:
fm = logging.Formatter('%(levelname)s %(message)s')
sh = logging.StreamHandler()
sh.setLevel(logging.DEBUG)
sh.setFormatter(fm)
log.addHandler(sh)
return log
| Fix issue with duplicate output | Fix issue with duplicate output
| Python | mit | jreese/ircstat,jreese/ircstat |
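The bug being fixed is a classic logging pitfall: logging.getLogger(name) returns the same logger object on every call, so each call to the old logger() attached one more StreamHandler and every record was emitted once per handler. A minimal standalone reproduction:

import logging

log = logging.getLogger("demo")
log.addHandler(logging.StreamHandler())
log.addHandler(logging.StreamHandler())
log.warning("hello")  # printed twice: once per attached handler

The module-level sh guard in the new version ensures the handler is created and attached only on the first call.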
d92c2dba7e549cee8059ecf4f1017956a630cd7a | web3/utils/validation.py | web3/utils/validation.py | from eth_utils import (
is_address,
is_checksum_address,
is_checksum_formatted_address,
is_dict,
is_list_like,
)
def validate_abi(abi):
"""
Helper function for validating an ABI
"""
if not is_list_like(abi):
raise ValueError("'abi' is not a list")
for e in abi:
if not is_dict(e):
raise ValueError("The elements of 'abi' are not all dictionaries")
def validate_address(value):
"""
Helper function for validating an address
"""
if not is_address(value):
raise ValueError("'{0}' is not an address".format(value))
validate_address_checksum(value)
def validate_address_checksum(value):
"""
Helper function for validating an address EIP55 checksum
"""
if is_checksum_formatted_address(value):
if not is_checksum_address(value):
raise ValueError("'{0}' has an invalid EIP55 checksum".format(value))
| from eth_utils import (
is_address,
is_checksum_address,
is_checksum_formatted_address,
is_dict,
is_list_like,
)
def validate_abi(abi):
"""
Helper function for validating an ABI
"""
if not is_list_like(abi):
raise ValueError("'abi' is not a list")
for e in abi:
if not is_dict(e):
raise ValueError("The elements of 'abi' are not all dictionaries")
def validate_address(value):
"""
Helper function for validating an address
"""
validate_address_checksum(value)
if not is_address(value):
raise ValueError("'{0}' is not an address".format(value))
def validate_address_checksum(value):
"""
Helper function for validating an address EIP55 checksum
"""
if is_checksum_formatted_address(value):
if not is_checksum_address(value):
raise ValueError("'{0}' has an invalid EIP55 checksum".format(value))
| Raise error specific to address checksum failure | Raise error specific to address checksum failure
Because is_address() also checks for a valid checksum, the old code showed a generic "not an address" error if the checksum failed. | Python | mit | pipermerriam/web3.py |
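The practical effect, sketched with a hypothetical input. The mixed-case address below is invented and assumed to fail the EIP55 check; the behavior description follows the commit message rather than a live run.

value = "0xAAAAaaaaAAAAaaaaAAAAaaaaAAAAaaaaAAAAaaaa"  # invented, assumed bad checksum
# Old order: is_address(value) ran first and returned False for a
# bad-checksum mixed-case address, so callers saw the generic error:
#     ValueError: '0xAAAA...' is not an address
# New order: validate_address_checksum(value) runs first and raises the
# specific error instead:
#     ValueError: '0xAAAA...' has an invalid EIP55 checksum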
6a827bee5263c9bb5d34d6ac971581c62e827e7d | pinax/comments/models.py | pinax/comments/models.py | from datetime import datetime
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Comment(models.Model):
author = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, related_name="comments", on_delete=models.CASCADE)
name = models.CharField(max_length=100)
email = models.CharField(max_length=255, blank=True)
website = models.CharField(max_length=255, blank=True)
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.IntegerField()
content_object = GenericForeignKey()
comment = models.TextField()
submit_date = models.DateTimeField(default=datetime.now)
ip_address = models.GenericIPAddressField(null=True)
public = models.BooleanField(default=True)
@property
def data(self):
return {
"pk": self.pk,
"comment": self.comment,
"author": self.author.username if self.author else "",
"name": self.name,
"email": self.email,
"website": self.website,
"submit_date": str(self.submit_date)
}
def __str__(self):
return "pk=%d" % self.pk # pragma: no cover
| from datetime import datetime
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
class Comment(models.Model):
author = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, related_name="comments", on_delete=models.CASCADE)
name = models.CharField(max_length=100)
email = models.CharField(max_length=255, blank=True)
website = models.CharField(max_length=255, blank=True)
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.IntegerField()
content_object = GenericForeignKey()
comment = models.TextField()
submit_date = models.DateTimeField(default=datetime.now)
ip_address = models.GenericIPAddressField(null=True)
public = models.BooleanField(default=True)
@property
def data(self):
return {
"pk": self.pk,
"comment": self.comment,
"author": self.author.username if self.author else "",
"name": self.name,
"email": self.email,
"website": self.website,
"submit_date": str(self.submit_date)
}
def __str__(self):
return "pk=%d" % self.pk # pragma: no cover
 | Change syntax to drop Python 2 support | Change syntax to drop Python 2 support: remove the python_2_unicode_compatible import and decorator, which are no-ops on Python 3
| Python | mit | pinax/pinax-comments,pinax/pinax-comments,eldarion/dialogos |
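What the removed decorator did: python_2_unicode_compatible is Django's helper that, on Python 2, moved a class's __str__ to __unicode__ and synthesized a bytes-returning __str__; on Python 3 it returned the class unchanged. Roughly (a simplified sketch from memory of the utility, for illustration only):

import sys

def python_2_unicode_compatible(cls):
    if sys.version_info[0] == 2:
        cls.__unicode__ = cls.__str__
        cls.__str__ = lambda self: self.__unicode__().encode("utf-8")
    return cls  # unchanged on Python 3, which is why dropping it is safe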
af51ef98d8575e7832d79c1068c092d388866dcb | donut/donut_SMTP_handler.py | donut/donut_SMTP_handler.py | from logging.handlers import SMTPHandler
DEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM
members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups
WHERE group_name = "Devteam"
'''
class DonutSMTPHandler(SMTPHandler):
def __init__(self,
mailhost,
fromaddr,
toaddrs,
subject,
db_instance,
credentials=None,
secure=None,
timeout=5.0):
super().__init__(mailhost, fromaddr, toaddrs, subject, credentials,
secure, timeout)
self.db_instance = db_instance
def emit(self, record):
'''
Overrides SMTPHandler's emit such that we dynamically
get current donut dev team members
'''
self.toaddrs = self.getAdmins()
super().emit(record)
def getAdmins(self):
''' Returns current members in Devteam '''
with self.db_instance.cursor() as cursor:
cursor.execute(DEV_TEAM_EMAILS_QUERY, [])
res = cursor.fetchall()
return [result['email'] for result in res]
| from logging.handlers import SMTPHandler
DEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM
members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups
WHERE group_name = "Devteam"
'''
DEFAULT_DEV_TEAM_EMAILS = ['devteam@donut.caltech.edu']
class DonutSMTPHandler(SMTPHandler):
def __init__(self,
mailhost,
fromaddr,
toaddrs,
subject,
db_instance,
credentials=None,
secure=None,
timeout=5.0):
super().__init__(mailhost, fromaddr, toaddrs, subject, credentials,
secure, timeout)
self.db_instance = db_instance
def emit(self, record):
'''
Overrides SMTPHandler's emit such that we dynamically
get current donut dev team members
'''
self.toaddrs = self.getAdmins()
super().emit(record)
def getAdmins(self):
''' Returns current members in Devteam '''
try:
with self.db_instance.cursor() as cursor:
cursor.execute(DEV_TEAM_EMAILS_QUERY)
res = cursor.fetchall()
return [result['email'] for result in res]
except Exception:
# If the database is inaccessible, fallback to a hard-coded email list
return DEFAULT_DEV_TEAM_EMAILS
| Allow error email to still be sent if DB is down | Allow error email to still be sent if DB is down
We were seeing errors in the logs where the database was inaccessible,
but the errors were not being emailed out because the handler makes a DB query.
| Python | mit | ASCIT/donut,ASCIT/donut,ASCIT/donut |
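The shape of the fix is a common resilience pattern: guard the dynamic lookup and fall back to static configuration so the error-reporting path never depends on the failing component. A condensed, self-contained sketch (the class and query are stand-ins, not the project's API):

class BrokenDB:
    def cursor(self):
        raise RuntimeError("database unreachable")

def get_admins(db, fallback):
    try:
        with db.cursor() as cur:
            cur.execute("SELECT email FROM admins")
            return [row["email"] for row in cur.fetchall()]
    except Exception:
        return fallback  # the static list keeps error emails flowing

assert get_admins(BrokenDB(), ["devteam@example.org"]) == ["devteam@example.org"]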
185f429f2a4309addf446fb382434e1a0ecafb9a | crm_employees/models/crm_employees_range.py | crm_employees/models/crm_employees_range.py | # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmEmployeesRange(models.Model):
_name = 'crm.employees_range'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Employees range"
name = fields.Char(required=True)
parent_id = fields.Many2one(comodel_name='crm.employees_range')
children = fields.One2many(comodel_name='crm.employees_range',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
| # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmEmployeesRange(models.Model):
_name = 'crm.employees_range'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Employees range"
name = fields.Char(required=True, translate=True)
parent_id = fields.Many2one(comodel_name='crm.employees_range')
children = fields.One2many(comodel_name='crm.employees_range',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
 | Set some fields as translatable | Set some fields as translatable
| Python | agpl-3.0 | Therp/partner-contact,open-synergy/partner-contact,diagramsoftware/partner-contact,Endika/partner-contact,acsone/partner-contact |
78b62cd865b5c31a17c982b78dc91127ebf54525 | erpnext/patches/may_2012/same_purchase_rate_patch.py | erpnext/patches/may_2012/same_purchase_rate_patch.py | def execute():
import webnotes
gd = webnotes.model.code.get_obj('Global Defaults')
gd.doc.maintain_same_rate = 1
gd.doc.save()
gd.on_update()
| def execute():
import webnotes
from webnotes.model.code import get_obj
gd = get_obj('Global Defaults')
gd.doc.maintain_same_rate = 1
gd.doc.save()
gd.on_update()
 | Maintain same rate throughout purchase cycle: in Global Defaults, set to true by default | Maintain same rate throughout purchase cycle: in Global Defaults, set to true by default
| Python | agpl-3.0 | rohitwaghchaure/digitales_erpnext,gangadhar-kadam/smrterp,pombredanne/erpnext,saurabh6790/test-med-app,gangadharkadam/johnerp,indictranstech/erpnext,hernad/erpnext,gangadhar-kadam/helpdesk-erpnext,gangadhar-kadam/mic-erpnext,mbauskar/Das_Erpnext,hernad/erpnext,Tejal011089/huntercamp_erpnext,saurabh6790/ON-RISAPP,mbauskar/phrerp,gangadhar-kadam/laganerp,gangadhar-kadam/hrerp,pombredanne/erpnext,pawaranand/phrerp,gangadharkadam/contributionerp,mbauskar/phrerp,dieface/erpnext,indictranstech/Das_Erpnext,suyashphadtare/sajil-erp,njmube/erpnext,indictranstech/fbd_erpnext,indictranstech/phrerp,gangadhar-kadam/powapp,njmube/erpnext,saurabh6790/aimobilize-app-backup,gangadhar-kadam/latestchurcherp,Drooids/erpnext,indictranstech/biggift-erpnext,geekroot/erpnext,suyashphadtare/sajil-erp,suyashphadtare/sajil-final-erp,indictranstech/Das_Erpnext,indictranstech/biggift-erpnext,indictranstech/phrerp,gangadhar-kadam/verve_erp,mbauskar/internal-hr,gangadhar-kadam/church-erpnext,gmarke/erpnext,Tejal011089/Medsyn2_app,indictranstech/buyback-erp,gangadhar-kadam/smrterp,Tejal011089/digitales_erpnext,Tejal011089/trufil-erpnext,indictranstech/vestasi-erpnext,gmarke/erpnext,netfirms/erpnext,hatwar/buyback-erpnext,dieface/erpnext,shitolepriya/test-erp,gangadharkadam/contributionerp,mbauskar/sapphire-erpnext,SPKian/Testing2,suyashphadtare/test,sheafferusa/erpnext,fuhongliang/erpnext,gangadharkadam/verveerp,indictranstech/tele-erpnext,saurabh6790/omnisys-app,Tejal011089/paypal_erpnext,mbauskar/omnitech-erpnext,shitolepriya/test-erp,gangadhar-kadam/verve-erp,mbauskar/phrerp,gangadhar-kadam/adb-erp,saurabh6790/omnit-app,MartinEnder/erpnext-de,SPKian/Testing,rohitwaghchaure/GenieManager-erpnext,indictranstech/Das_Erpnext,geekroot/erpnext,gangadharkadam/tailorerp,suyashphadtare/vestasi-erp-jan-end,hanselke/erpnext-1,mahabuber/erpnext,gangadhar-kadam/helpdesk-erpnext,hatwar/Das_erpnext,aruizramon/alec_erpnext,saurabh6790/medsyn-app1,saurabh6790/test_final_med_app,gangadharkadam/v4_erp,indictranstech/trufil-erpnext,anandpdoshi/erpnext,SPKian/Testing2,rohitwaghchaure/New_Theme_Erp,indictranstech/buyback-erp,gsnbng/erpnext,saurabh6790/medsyn-app,saurabh6790/omn-app,sagar30051991/ozsmart-erp,gangadhar-kadam/latestchurcherp,sagar30051991/ozsmart-erp,gangadhar-kadam/mtn-erpnext,Tejal011089/paypal_erpnext,gangadharkadam/office_erp,saurabh6790/med_new_app,netfirms/erpnext,BhupeshGupta/erpnext,Suninus/erpnext,gsnbng/erpnext,gangadhar-kadam/latestchurcherp,Tejal011089/osmosis_erpnext,shitolepriya/test-erp,rohitwaghchaure/digitales_erpnext,gangadhar-kadam/verve_live_erp,ThiagoGarciaAlves/erpnext,ThiagoGarciaAlves/erpnext,gangadharkadam/v5_erp,ShashaQin/erpnext,SPKian/Testing,indictranstech/focal-erpnext,indictranstech/osmosis-erpnext,indictranstech/focal-erpnext,Suninus/erpnext,gangadharkadam/saloon_erp_install,Tejal011089/med2-app,mbauskar/omnitech-demo-erpnext,rohitwaghchaure/New_Theme_Erp,suyashphadtare/gd-erp,meisterkleister/erpnext,saurabh6790/test-med-app,mbauskar/alec_frappe5_erpnext,MartinEnder/erpnext-de,suyashphadtare/vestasi-erp-jan-end,gangadharkadam/v6_erp,gangadhar-kadam/powapp,gangadharkadam/sher,saurabh6790/alert-med-app,mbauskar/Das_Erpnext,BhupeshGupta/erpnext,indictranstech/reciphergroup-erpnext,Tejal011089/osmosis_erpnext,anandpdoshi/erpnext,gangadhar-kadam/verve_test_erp,gangadharkadam/v5_erp,shft117/SteckerApp,rohitwaghchaure/erpnext_smart,gangadhar-kadam/prjapp,geekroot/erpnext,saurabh6790/ON-RISAPP,indictranstech/buyback-erp,gangadharkadam/sterp,tmimori/erpnext,fuhongliang/erpnext,mbausk
ar/Das_Erpnext,Tejal011089/huntercamp_erpnext,gangadharkadam/saloon_erp,ThiagoGarciaAlves/erpnext,indictranstech/trufil-erpnext,saurabh6790/medapp,suyashphadtare/vestasi-erp-jan-end,saurabh6790/test-erp,indictranstech/fbd_erpnext,gangadharkadam/saloon_erp_install,gangadhar-kadam/laganerp,Tejal011089/digitales_erpnext,fuhongliang/erpnext,SPKian/Testing2,saurabh6790/aimobilize,meisterkleister/erpnext,indictranstech/focal-erpnext,gangadharkadam/saloon_erp,SPKian/Testing,rohitwaghchaure/erpnext-receipher,gangadharkadam/smrterp,gangadharkadam/v5_erp,gangadhar-kadam/sms-erpnext,gangadharkadam/office_erp,hernad/erpnext,mbauskar/sapphire-erpnext,gangadharkadam/saloon_erp_install,saurabh6790/OFF-RISAPP,suyashphadtare/vestasi-update-erp,ShashaQin/erpnext,gangadhar-kadam/laganerp,Tejal011089/osmosis_erpnext,treejames/erpnext,gangadhar-kadam/sms-erpnext,BhupeshGupta/erpnext,mbauskar/omnitech-demo-erpnext,tmimori/erpnext,saurabh6790/medsynaptic1-app,gangadharkadam/vlinkerp,sagar30051991/ozsmart-erp,Tejal011089/trufil-erpnext,rohitwaghchaure/erpnext-receipher,gangadharkadam/sterp,indictranstech/fbd_erpnext,saurabh6790/trufil_app,rohitwaghchaure/GenieManager-erpnext,sheafferusa/erpnext,saurabh6790/med_new_app,indictranstech/phrerp,suyashphadtare/gd-erp,njmube/erpnext,mbauskar/internal-hr,gangadhar-kadam/sapphire_app,Tejal011089/trufil-erpnext,gangadharkadam/vlinkerp,gangadharkadam/tailorerp,indictranstech/tele-erpnext,susuchina/ERPNEXT,Tejal011089/digitales_erpnext,suyashphadtare/sajil-final-erp,gangadharkadam/saloon_erp,MartinEnder/erpnext-de,gangadharkadam/vlinkerp,saurabh6790/med_app_rels,SPKian/Testing,rohitwaghchaure/erpnext_smart,saurabh6790/medsynaptic1-app,gangadhar-kadam/verve_test_erp,rohitwaghchaure/GenieManager-erpnext,gangadharkadam/contributionerp,4commerce-technologies-AG/erpnext,saurabh6790/omnitech-apps,indictranstech/tele-erpnext,hatwar/buyback-erpnext,saurabh6790/medsynaptic-app,susuchina/ERPNEXT,gangadharkadam/v6_erp,indictranstech/osmosis-erpnext,saurabh6790/omnitech-apps,rohitwaghchaure/erpnext_smart,rohitwaghchaure/New_Theme_Erp,saurabh6790/trufil_app,indictranstech/vestasi-erpnext,mbauskar/sapphire-erpnext,hatwar/focal-erpnext,pombredanne/erpnext,gangadharkadam/smrterp,saurabh6790/pow-app,mbauskar/omnitech-erpnext,treejames/erpnext,gangadharkadam/office_erp,dieface/erpnext,indictranstech/trufil-erpnext,hatwar/buyback-erpnext,indictranstech/vestasi-erpnext,saurabh6790/medsyn-app1,gangadharkadam/v4_erp,mbauskar/alec_frappe5_erpnext,netfirms/erpnext,suyashphadtare/vestasi-erp-final,gangadhar-kadam/verve_erp,suyashphadtare/vestasi-update-erp,suyashphadtare/vestasi-erp-final,mahabuber/erpnext,gangadharkadam/letzerp,susuchina/ERPNEXT,suyashphadtare/sajil-final-erp,indictranstech/osmosis-erpnext,treejames/erpnext,hatwar/Das_erpnext,gangadhar-kadam/verve_erp,mahabuber/erpnext,saurabh6790/pow-app,shft117/SteckerApp,indictranstech/phrerp,gangadhar-kadam/verve_erp,mbauskar/sapphire-erpnext,saurabh6790/omnisys-app,suyashphadtare/vestasi-update-erp,gangadhar-kadam/helpdesk-erpnext,Yellowen/Owrang,saurabh6790/aimobilize,gangadhar-kadam/powapp,hatwar/focal-erpnext,saurabh6790/omnit-app,fuhongliang/erpnext,ThiagoGarciaAlves/erpnext,saurabh6790/test-erp,Tejal011089/trufil-erpnext,hernad/erpnext,suyashphadtare/vestasi-erp-1,sheafferusa/erpnext,indictranstech/internal-erpnext,mbauskar/Das_Erpnext,mbauskar/helpdesk-erpnext,gangadhar-kadam/hrerp,Tejal011089/fbd_erpnext,hanselke/erpnext-1,gangadhar-kadam/helpdesk-erpnext,saurabh6790/tru_app_back,indictranstech/tele-erpnext,gsnbng/erpnext,rohitwaghchaure
/erpnext-receipher,sheafferusa/erpnext,gangadharkadam/verveerp,suyashphadtare/gd-erp,gangadhar-kadam/nassimapp,gangadhar-kadam/nassimapp,shft117/SteckerApp,gangadhar-kadam/verve_test_erp,rohitwaghchaure/erpnext-receipher,gmarke/erpnext,saurabh6790/OFF-RISAPP,4commerce-technologies-AG/erpnext,indictranstech/erpnext,meisterkleister/erpnext,Tejal011089/med2-app,Tejal011089/Medsyn2_app,suyashphadtare/vestasi-erp-final,mbauskar/omnitech-erpnext,ShashaQin/erpnext,gangadharkadam/v4_erp,mbauskar/phrerp,sagar30051991/ozsmart-erp,indictranstech/erpnext,indictranstech/internal-erpnext,suyashphadtare/vestasi-erp-jan-end,hatwar/focal-erpnext,indictranstech/internal-erpnext,SPKian/Testing2,Drooids/erpnext,hatwar/Das_erpnext,gangadhar-kadam/prjapp,gangadharkadam/sher,Tejal011089/paypal_erpnext,gangadharkadam/vlinkerp,suyashphadtare/vestasi-erp-1,gangadhar-kadam/church-erpnext,indictranstech/erpnext,geekroot/erpnext,Tejal011089/osmosis_erpnext,gangadhar-kadam/verve_live_erp,gangadharkadam/v5_erp,hatwar/buyback-erpnext,suyashphadtare/test,mbauskar/alec_frappe5_erpnext,saurabh6790/alert-med-app,Suninus/erpnext,saurabh6790/med_app_rels,gangadhar-kadam/latestchurcherp,Tejal011089/digitales_erpnext,rohitwaghchaure/digitales_erpnext,gangadhar-kadam/mic-erpnext,indictranstech/reciphergroup-erpnext,indictranstech/trufil-erpnext,gangadhar-kadam/sapphire_app,aruizramon/alec_erpnext,gangadharkadam/saloon_erp_install,saurabh6790/omn-app,indictranstech/Das_Erpnext,anandpdoshi/erpnext,rohitwaghchaure/New_Theme_Erp,meisterkleister/erpnext,mbauskar/omnitech-erpnext,mbauskar/omnitech-demo-erpnext,Aptitudetech/ERPNext,mbauskar/helpdesk-erpnext,gangadhar-kadam/verve_live_erp,suyashphadtare/sajil-erp,shitolepriya/test-erp,mbauskar/helpdesk-erpnext,Tejal011089/fbd_erpnext,hanselke/erpnext-1,saurabh6790/test-erp,gangadharkadam/letzerp,Tejal011089/fbd_erpnext,gangadharkadam/v6_erp,saurabh6790/medsyn-app,gangadhar-kadam/verve-erp,gangadharkadam/verveerp,gangadharkadam/contributionerp,gangadhar-kadam/verve-erp,treejames/erpnext,gsnbng/erpnext,pombredanne/erpnext,gangadharkadam/saloon_erp,indictranstech/fbd_erpnext,indictranstech/biggift-erpnext,gangadhar-kadam/verve_test_erp,aruizramon/alec_erpnext,suyashphadtare/test,mbauskar/helpdesk-erpnext,4commerce-technologies-AG/erpnext,Drooids/erpnext,saurabh6790/test_final_med_app,shft117/SteckerApp,netfirms/erpnext,gangadharkadam/letzerp,mbauskar/internal-hr,saurabh6790/omni-apps,tmimori/erpnext,pawaranand/phrerp,hanselke/erpnext-1,indictranstech/osmosis-erpnext,njmube/erpnext,gmarke/erpnext,Tejal011089/fbd_erpnext,saurabh6790/test-erp,BhupeshGupta/erpnext,gangadhar-kadam/sapphire_app,Yellowen/Owrang,susuchina/ERPNEXT,indictranstech/internal-erpnext,hatwar/focal-erpnext,gangadharkadam/johnerp,indictranstech/biggift-erpnext,gangadharkadam/v6_erp,saurabh6790/aimobilize-app-backup,gangadharkadam/letzerp,Tejal011089/huntercamp_erpnext,saurabh6790/tru_app_back,saurabh6790/omni-apps,Drooids/erpnext,indictranstech/vestasi-erpnext,suyashphadtare/gd-erp,pawaranand/phrerp,Tejal011089/huntercamp_erpnext,dieface/erpnext,Suninus/erpnext,Tejal011089/paypal_erpnext,tmimori/erpnext,saurabh6790/medapp,indictranstech/buyback-erp,ShashaQin/erpnext,pawaranand/phrerp,indictranstech/focal-erpnext,indictranstech/reciphergroup-erpnext,indictranstech/reciphergroup-erpnext,hatwar/Das_erpnext,mbauskar/omnitech-demo-erpnext,gangadharkadam/v4_erp,MartinEnder/erpnext-de,anandpdoshi/erpnext,suyashphadtare/vestasi-erp-1,rohitwaghchaure/GenieManager-erpnext,mahabuber/erpnext,gangadhar-kadam/adb-erp,gangadhar-kadam/mtn-e
rpnext,gangadhar-kadam/verve_live_erp,aruizramon/alec_erpnext,gangadharkadam/verveerp,mbauskar/alec_frappe5_erpnext,rohitwaghchaure/digitales_erpnext,saurabh6790/medsynaptic-app |
840af484f3b0f615167adf9600263e0d8c2e3875 | wrappers/python/setup.py | wrappers/python/setup.py | from distutils.core import setup
import os
PKG_VERSION = os.environ.get('PACKAGE_VERSION') or '1.9.0'
setup(
name='python3-indy',
version=PKG_VERSION,
packages=['indy'],
url='https://github.com/hyperledger/indy-sdk',
license='MIT/Apache-2.0',
author='Vyacheslav Gudkov',
author_email='vyacheslav.gudkov@dsr-company.com',
description='This is the official SDK for Hyperledger Indy (https://www.hyperledger.org/projects), which provides a distributed-ledger-based foundation for self-sovereign identity (https://sovrin.org). The major artifact of the SDK is a c-callable library.',
install_requires=['pytest<3.7', 'pytest-asyncio', 'base58'],
tests_require=['pytest<3.7', 'pytest-asyncio', 'base58']
)
| from distutils.core import setup
import os
PKG_VERSION = os.environ.get('PACKAGE_VERSION') or '1.9.0'
setup(
name='python3-indy',
version=PKG_VERSION,
packages=['indy'],
url='https://github.com/hyperledger/indy-sdk',
license='MIT/Apache-2.0',
author='Vyacheslav Gudkov',
author_email='vyacheslav.gudkov@dsr-company.com',
description='This is the official SDK for Hyperledger Indy (https://www.hyperledger.org/projects), which provides a distributed-ledger-based foundation for self-sovereign identity (https://sovrin.org). The major artifact of the SDK is a c-callable library.',
install_requires=['base58'],
tests_require=['pytest<3.7', 'pytest-asyncio', 'base58']
)
| Remove install dependency of pytest from python wrapper | Remove install dependency of pytest from python wrapper
Signed-off-by: Daniel Bluhm <6df8625bb799b640110458f819853f591a9910cb@sovrin.org>
| Python | apache-2.0 | Artemkaaas/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,peacekeeper/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,Artemkaaas/indy-sdk,Artemkaaas/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk |
c95c3e3c505ca46e62fc87690f36326c5579887b | overextends/models.py | overextends/models.py |
# This app doesn't contain any models, but as its template tags need to
# be added to built-ins at start-up time, this is a good place to do it.
from django.template.loader import add_to_builtins
add_to_builtins("overextends.templatetags.overextends_tags")
|
# This app doesn't contain any models, but as its template tags need to
# be added to built-ins at start-up time, this is a good place to do it.
from django.template.base import add_to_builtins
add_to_builtins("overextends.templatetags.overextends_tags")
| Fix import path of add_to_builtins | Fix import path of add_to_builtins | Python | bsd-2-clause | dwaynebailey/django-overextends,stephenmcd/django-overextends,vinnyrose/django-overextends,biljettshop/django-overextends,taedori81/django-overextends |
7b8a92658ab91e6da548a440f6fa5f6bd8eb85e5 | yatsm/pipeline/_exec.py | yatsm/pipeline/_exec.py | """ Functions for handling the execution of a pipeline graph
"""
from toolz.curried import curry
| """ Functions for handling the execution of a pipeline graph
"""
import logging
from toolz import curry
from dask import delayed
from ._topology import config_to_tasks
from .language import OUTPUT, REQUIRE
from .tasks import pipeline_tasks
logger = logging.getLogger(__name__)
def curry_pipeline_task(func, spec):
return curry(func,
**{REQUIRE: spec[REQUIRE],
OUTPUT: spec[OUTPUT],
'config': spec.get('config', {})})
def setup_pipeline(config, pipe):
""" Process the configuration for a YATSM pipeline
Args:
config (dict): Pipeline configuration dictionary
pipe (dict[str: dict]): Dictionary storing ``data`` and ``record``
information. At this point, ``data`` and ``record`` can either
store actual data (e.g., an `xarray.Dataset`) or simply a
dictionary that mimics the data (i.e., it contains the same keys).
Returns:
list: List of curried, delayed functions ready to be ran in a pipeline
"""
tasks = config_to_tasks(config, pipe)
pipeline = []
for task in tasks:
# TODO: curry & delay these
try:
func = pipeline_tasks[config[task]['task']]
except KeyError as exc:
logger.error('Unknown pipeline task "{}" referenced in "{}"'
.format(config[task]['task'], task))
raise
pipeline.append(curry_pipeline_task(func, config[task]))
return pipeline
def delay_pipeline(pipeline, pipe):
""" Return a ``dask.delayed`` pipeline ready to execute
Args:
pipeline (list[callable]): A list of curried functions ready to be
run using data from ``pipe``. This list may be constructed as the
output of :ref:`setup_pipeline`, for example.
pipe (dict): Dictionary storing ``data`` and ``record`` information.
Returns:
dask.delayed: A delayed pipeline ready to be executed
"""
_pipeline = delayed(pipeline[0])(pipe)
for task in pipeline[1:]:
_pipeline = delayed(task)(_pipeline)
return _pipeline
| Add funcs to prepare pipeline execution | Add funcs to prepare pipeline execution
| Python | mit | c11/yatsm,c11/yatsm,valpasq/yatsm,valpasq/yatsm |
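For readers new to dask here, delay_pipeline threads each task's delayed output into the next call, so computing the final node executes the whole chain in order. A toy demonstration of the same chaining (assumes dask is installed; the task bodies are made up):

from dask import delayed

def add_one(pipe):
    return pipe + 1

def double(pipe):
    return pipe * 2

node = delayed(add_one)(10)   # mirrors delayed(pipeline[0])(pipe)
node = delayed(double)(node)  # mirrors the loop over pipeline[1:]
assert node.compute() == 22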
52240834fc2144327094f1f9d319184dcccb3da7 | framework/tasks/handlers.py | framework/tasks/handlers.py | # -*- coding: utf-8 -*-
import logging
from flask import g
from celery import group
from website import settings
logger = logging.getLogger(__name__)
def celery_before_request():
g._celery_tasks = []
def celery_teardown_request(error=None):
if error is not None:
return
try:
tasks = g._celery_tasks
if tasks:
group(*tasks)()
except AttributeError:
if not settings.DEBUG_MODE:
logger.error('Task queue not initialized')
def enqueue_task(signature):
if signature not in g._celery_tasks:
g._celery_tasks.append(signature)
handlers = {
'before_request': celery_before_request,
'teardown_request': celery_teardown_request,
}
| # -*- coding: utf-8 -*-
import logging
from flask import g
from celery import group
from website import settings
logger = logging.getLogger(__name__)
def celery_before_request():
g._celery_tasks = []
def celery_teardown_request(error=None):
if error is not None:
return
try:
tasks = g._celery_tasks
if tasks:
group(*tasks)()
except AttributeError:
if not settings.DEBUG_MODE:
logger.error('Task queue not initialized')
def enqueue_task(signature):
"""If working in a request context, push task signature to ``g`` to run
after request is complete; else run signature immediately.
:param signature: Celery task signature
"""
try:
if signature not in g._celery_tasks:
g._celery_tasks.append(signature)
except RuntimeError:
signature()
handlers = {
'before_request': celery_before_request,
'teardown_request': celery_teardown_request,
}
| Handle queued tasks when working outside request context. | Handle queued tasks when working outside request context.
| Python | apache-2.0 | chrisseto/osf.io,billyhunt/osf.io,mluke93/osf.io,ckc6cz/osf.io,billyhunt/osf.io,haoyuchen1992/osf.io,CenterForOpenScience/osf.io,brandonPurvis/osf.io,Ghalko/osf.io,asanfilippo7/osf.io,hmoco/osf.io,adlius/osf.io,reinaH/osf.io,baylee-d/osf.io,RomanZWang/osf.io,chrisseto/osf.io,dplorimer/osf,TomBaxter/osf.io,HalcyonChimera/osf.io,fabianvf/osf.io,cldershem/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,sbt9uc/osf.io,mluo613/osf.io,kushG/osf.io,KAsante95/osf.io,alexschiller/osf.io,RomanZWang/osf.io,TomBaxter/osf.io,zamattiac/osf.io,Johnetordoff/osf.io,barbour-em/osf.io,reinaH/osf.io,Johnetordoff/osf.io,mluo613/osf.io,ckc6cz/osf.io,wearpants/osf.io,mfraezz/osf.io,laurenrevere/osf.io,lyndsysimon/osf.io,dplorimer/osf,KAsante95/osf.io,adlius/osf.io,acshi/osf.io,haoyuchen1992/osf.io,jnayak1/osf.io,zachjanicki/osf.io,cslzchen/osf.io,crcresearch/osf.io,CenterForOpenScience/osf.io,kushG/osf.io,jolene-esposito/osf.io,samanehsan/osf.io,jeffreyliu3230/osf.io,cldershem/osf.io,himanshuo/osf.io,caneruguz/osf.io,wearpants/osf.io,revanthkolli/osf.io,baylee-d/osf.io,dplorimer/osf,amyshi188/osf.io,mluo613/osf.io,mfraezz/osf.io,petermalcolm/osf.io,hmoco/osf.io,asanfilippo7/osf.io,Ghalko/osf.io,aaxelb/osf.io,samchrisinger/osf.io,binoculars/osf.io,zachjanicki/osf.io,jnayak1/osf.io,caseyrygt/osf.io,acshi/osf.io,ticklemepierce/osf.io,TomHeatwole/osf.io,billyhunt/osf.io,lyndsysimon/osf.io,adlius/osf.io,Johnetordoff/osf.io,kwierman/osf.io,leb2dg/osf.io,himanshuo/osf.io,emetsger/osf.io,monikagrabowska/osf.io,arpitar/osf.io,petermalcolm/osf.io,sloria/osf.io,abought/osf.io,brianjgeiger/osf.io,jinluyuan/osf.io,RomanZWang/osf.io,mattclark/osf.io,asanfilippo7/osf.io,danielneis/osf.io,lamdnhan/osf.io,cslzchen/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,jmcarp/osf.io,jnayak1/osf.io,zachjanicki/osf.io,HarryRybacki/osf.io,haoyuchen1992/osf.io,DanielSBrown/osf.io,caseyrygt/osf.io,zachjanicki/osf.io,adlius/osf.io,baylee-d/osf.io,aaxelb/osf.io,KAsante95/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,wearpants/osf.io,doublebits/osf.io,GaryKriebel/osf.io,zkraime/osf.io,jnayak1/osf.io,kwierman/osf.io,ckc6cz/osf.io,fabianvf/osf.io,doublebits/osf.io,emetsger/osf.io,DanielSBrown/osf.io,cldershem/osf.io,lamdnhan/osf.io,crcresearch/osf.io,cosenal/osf.io,monikagrabowska/osf.io,ticklemepierce/osf.io,abought/osf.io,ticklemepierce/osf.io,brianjgeiger/osf.io,reinaH/osf.io,hmoco/osf.io,emetsger/osf.io,TomHeatwole/osf.io,petermalcolm/osf.io,revanthkolli/osf.io,wearpants/osf.io,jinluyuan/osf.io,mfraezz/osf.io,himanshuo/osf.io,danielneis/osf.io,alexschiller/osf.io,samchrisinger/osf.io,jolene-esposito/osf.io,AndrewSallans/osf.io,cwisecarver/osf.io,rdhyee/osf.io,caseyrollins/osf.io,HarryRybacki/osf.io,sloria/osf.io,cldershem/osf.io,binoculars/osf.io,zamattiac/osf.io,pattisdr/osf.io,dplorimer/osf,billyhunt/osf.io,amyshi188/osf.io,arpitar/osf.io,samanehsan/osf.io,TomBaxter/osf.io,njantrania/osf.io,doublebits/osf.io,mfraezz/osf.io,mluo613/osf.io,njantrania/osf.io,amyshi188/osf.io,barbour-em/osf.io,acshi/osf.io,amyshi188/osf.io,samanehsan/osf.io,cslzchen/osf.io,hmoco/osf.io,monikagrabowska/osf.io,erinspace/osf.io,ticklemepierce/osf.io,acshi/osf.io,icereval/osf.io,HarryRybacki/osf.io,cosenal/osf.io,RomanZWang/osf.io,GaryKriebel/osf.io,Nesiehr/osf.io,mattclark/osf.io,revanthkolli/osf.io,laurenrevere/osf.io,zkraime/osf.io,kwierman/osf.io,brianjgeiger/osf.io,barbour-em/osf.io,brandonPurvis/osf.io,felliott/osf.io,SSJohns/osf.io,kch8qx/osf.io,samchrisinger/osf.io,Ghalko/osf.io,kwierman/osf.io,rdhyee/osf.io,jolene-esposito/osf
.io,MerlinZhang/osf.io,abought/osf.io,Nesiehr/osf.io,himanshuo/osf.io,DanielSBrown/osf.io,AndrewSallans/osf.io,RomanZWang/osf.io,mluke93/osf.io,petermalcolm/osf.io,MerlinZhang/osf.io,samanehsan/osf.io,emetsger/osf.io,mattclark/osf.io,alexschiller/osf.io,brandonPurvis/osf.io,chennan47/osf.io,acshi/osf.io,mluke93/osf.io,DanielSBrown/osf.io,kch8qx/osf.io,jinluyuan/osf.io,cslzchen/osf.io,lyndsysimon/osf.io,doublebits/osf.io,GageGaskins/osf.io,caseyrollins/osf.io,lamdnhan/osf.io,caneruguz/osf.io,SSJohns/osf.io,fabianvf/osf.io,Ghalko/osf.io,aaxelb/osf.io,MerlinZhang/osf.io,caseyrygt/osf.io,cosenal/osf.io,kch8qx/osf.io,GageGaskins/osf.io,GageGaskins/osf.io,TomHeatwole/osf.io,chrisseto/osf.io,leb2dg/osf.io,njantrania/osf.io,lyndsysimon/osf.io,fabianvf/osf.io,mluke93/osf.io,sloria/osf.io,lamdnhan/osf.io,SSJohns/osf.io,ZobairAlijan/osf.io,bdyetton/prettychart,brianjgeiger/osf.io,saradbowman/osf.io,ckc6cz/osf.io,ZobairAlijan/osf.io,SSJohns/osf.io,cosenal/osf.io,HarryRybacki/osf.io,kch8qx/osf.io,crcresearch/osf.io,MerlinZhang/osf.io,mluo613/osf.io,brandonPurvis/osf.io,felliott/osf.io,brandonPurvis/osf.io,jmcarp/osf.io,jmcarp/osf.io,KAsante95/osf.io,sbt9uc/osf.io,chennan47/osf.io,leb2dg/osf.io,zamattiac/osf.io,billyhunt/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,bdyetton/prettychart,barbour-em/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,arpitar/osf.io,danielneis/osf.io,HalcyonChimera/osf.io,GageGaskins/osf.io,bdyetton/prettychart,doublebits/osf.io,sbt9uc/osf.io,jolene-esposito/osf.io,bdyetton/prettychart,zkraime/osf.io,erinspace/osf.io,zkraime/osf.io,leb2dg/osf.io,alexschiller/osf.io,GaryKriebel/osf.io,alexschiller/osf.io,danielneis/osf.io,saradbowman/osf.io,icereval/osf.io,sbt9uc/osf.io,Nesiehr/osf.io,samchrisinger/osf.io,Johnetordoff/osf.io,GageGaskins/osf.io,arpitar/osf.io,GaryKriebel/osf.io,kushG/osf.io,jeffreyliu3230/osf.io,cwisecarver/osf.io,caneruguz/osf.io,KAsante95/osf.io,icereval/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,cwisecarver/osf.io,binoculars/osf.io,kch8qx/osf.io,abought/osf.io,aaxelb/osf.io,TomHeatwole/osf.io,kushG/osf.io,pattisdr/osf.io,njantrania/osf.io,pattisdr/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,jeffreyliu3230/osf.io,revanthkolli/osf.io,rdhyee/osf.io,jinluyuan/osf.io,erinspace/osf.io,reinaH/osf.io,jeffreyliu3230/osf.io,laurenrevere/osf.io,asanfilippo7/osf.io,jmcarp/osf.io,chrisseto/osf.io,felliott/osf.io,chennan47/osf.io |
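The except RuntimeError branch works because Flask's g is a context-local proxy: touching it with no application or request context pushed raises RuntimeError ("Working outside of application context", or the request-context variant, depending on Flask version). A minimal reproduction (assumes Flask is installed):

from flask import Flask, g

app = Flask(__name__)

def enqueue_or_run(signature):
    try:
        g._celery_tasks.append(signature)  # only valid inside a context
    except RuntimeError:
        signature()                        # no context: run immediately

enqueue_or_run(lambda: print("ran immediately"))  # outside any context

with app.test_request_context("/"):
    g._celery_tasks = []
    enqueue_or_run(lambda: print("queued"))  # not printed; queued instead
    assert len(g._celery_tasks) == 1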
061e0e0702025d99956b7dc606ea0bb4fa5c84ea | flocker/restapi/_logging.py | flocker/restapi/_logging.py | # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
This module defines the Eliot log events emitted by the API implementation.
"""
__all__ = [
"JSON_REQUEST",
"REQUEST",
]
from eliot import Field, ActionType
LOG_SYSTEM = u"api"
METHOD = Field(u"method", lambda method: method,
u"The HTTP method of the request.")
REQUEST_PATH = Field(
u"request_path", lambda path: path,
u"The absolute path of the resource to which the request was issued.")
JSON = Field.forTypes(
u"json", [unicode, bytes, dict, list, None, bool, float],
u"JSON, either request or response depending on context.")
RESPONSE_CODE = Field.forTypes(
u"code", [int],
u"The response code for the request.")
REQUEST = ActionType(
LOG_SYSTEM + u":request",
[REQUEST_PATH, METHOD],
[],
u"A request was received on the public HTTP interface.")
JSON_REQUEST = ActionType(
LOG_SYSTEM + u":json_request",
[JSON],
[RESPONSE_CODE, JSON],
u"A request containing JSON request and response.")
| # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
This module defines the Eliot log events emitted by the API implementation.
"""
__all__ = [
"JSON_REQUEST",
"REQUEST",
]
from eliot import Field, ActionType
LOG_SYSTEM = u"api"
METHOD = Field(u"method", lambda method: method,
u"The HTTP method of the request.")
REQUEST_PATH = Field(
u"request_path", lambda path: path,
u"The absolute path of the resource to which the request was issued.")
JSON = Field.forTypes(
u"json", [unicode, bytes, dict, list, None, bool, float],
u"JSON, either request or response depending on context.")
RESPONSE_CODE = Field.forTypes(
u"code", [int],
u"The response code for the request.")
# It would be nice if RESPONSE_CODE was in REQUEST instead of
# JSON_REQUEST; see FLOC-1586.
REQUEST = ActionType(
LOG_SYSTEM + u":request",
[REQUEST_PATH, METHOD],
[],
u"A request was received on the public HTTP interface.")
JSON_REQUEST = ActionType(
LOG_SYSTEM + u":json_request",
[JSON],
[RESPONSE_CODE, JSON],
u"A request containing JSON request and response bodies.")
| Address review comment: Better documentation. | Address review comment: Better documentation.
| Python | apache-2.0 | Azulinho/flocker,moypray/flocker,wallnerryan/flocker-profiles,mbrukman/flocker,w4ngyi/flocker,adamtheturtle/flocker,runcom/flocker,mbrukman/flocker,LaynePeng/flocker,achanda/flocker,jml/flocker,hackday-profilers/flocker,AndyHuu/flocker,jml/flocker,lukemarsden/flocker,LaynePeng/flocker,achanda/flocker,1d4Nf6/flocker,Azulinho/flocker,hackday-profilers/flocker,moypray/flocker,w4ngyi/flocker,1d4Nf6/flocker,hackday-profilers/flocker,lukemarsden/flocker,AndyHuu/flocker,Azulinho/flocker,1d4Nf6/flocker,mbrukman/flocker,agonzalezro/flocker,LaynePeng/flocker,wallnerryan/flocker-profiles,adamtheturtle/flocker,w4ngyi/flocker,moypray/flocker,lukemarsden/flocker,wallnerryan/flocker-profiles,agonzalezro/flocker,adamtheturtle/flocker,achanda/flocker,runcom/flocker,jml/flocker,agonzalezro/flocker,AndyHuu/flocker,runcom/flocker |
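The record above only refines wording, but the two action types are easier to read with a usage sketch. The following is a hypothetical illustration, not code from the commit; it assumes eliot's usual convention that calling an ActionType with its start fields yields a context manager, and that addSuccessFields() attaches the declared success fields. The handler body and values are invented.
# Hypothetical usage of the action type defined above (sketch only).
from flocker.restapi._logging import JSON_REQUEST
def handle_json_request(body):
    with JSON_REQUEST(json=body) as action:
        code, response = 200, {u"result": u"ok"}  # invented handler result
        action.addSuccessFields(code=code, json=response)
        return response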
397b48e9837cd66b04cc77c08fb569ca97e935c1 | tests/test_main.py | tests/test_main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_main
---------
Tests for `cookiecutter.main` module.
"""
import logging
import unittest
from cookiecutter import main
# Log debug and above to console
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG)
class TestAbbreviationExpansion(unittest.TestCase):
def test_abbreviation_expansion(self):
template = main.expand_abbreviations('foo', {'abbreviations': {'foo': 'bar'}})
self.assertEqual(template, 'bar')
def test_abbreviation_expansion_not_an_abbreviation(self):
template = main.expand_abbreviations('baz', {'abbreviations': {'foo': 'bar'}})
self.assertEqual(template, 'baz')
def test_abbreviation_expansion_prefix(self):
template = main.expand_abbreviations('xx:a', {'abbreviations': {'xx': '<{0}>'}})
self.assertEqual(template, '<a>')
def test_abbreviation_expansion_builtin(self):
template = main.expand_abbreviations('gh:a', {})
self.assertEqual(template, 'https://github.com/a.git')
def test_abbreviation_expansion_override_builtin(self):
template = main.expand_abbreviations('gh:a', {'abbreviations': {'gh': '<{0}>'}})
self.assertEqual(template, '<a>')
def test_abbreviation_expansion_prefix_ignores_suffix(self):
template = main.expand_abbreviations('xx:a', {'abbreviations': {'xx': '<>'}})
self.assertEqual(template, '<>')
def test_abbreviation_expansion_prefix_not_0_in_braces(self):
self.assertRaises(
IndexError,
main.expand_abbreviations,
'xx:a',
{'abbreviations': {'xx': '{1}'}}
)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_main
---------
Tests for `cookiecutter.main` module.
"""
import logging
import unittest
from cookiecutter import main
# Log debug and above to console
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG)
if __name__ == '__main__':
unittest.main()
| Remove original implementation of TestAbbreviationExpansion | Remove original implementation of TestAbbreviationExpansion
| Python | bsd-3-clause | audreyr/cookiecutter,takeflight/cookiecutter,drgarcia1986/cookiecutter,cguardia/cookiecutter,terryjbates/cookiecutter,stevepiercy/cookiecutter,benthomasson/cookiecutter,willingc/cookiecutter,ionelmc/cookiecutter,vincentbernat/cookiecutter,vincentbernat/cookiecutter,vintasoftware/cookiecutter,lucius-feng/cookiecutter,lgp171188/cookiecutter,lgp171188/cookiecutter,luzfcb/cookiecutter,janusnic/cookiecutter,christabor/cookiecutter,Vauxoo/cookiecutter,sp1rs/cookiecutter,nhomar/cookiecutter,moi65/cookiecutter,foodszhang/cookiecutter,tylerdave/cookiecutter,luzfcb/cookiecutter,sp1rs/cookiecutter,foodszhang/cookiecutter,atlassian/cookiecutter,terryjbates/cookiecutter,hackebrot/cookiecutter,benthomasson/cookiecutter,lucius-feng/cookiecutter,michaeljoseph/cookiecutter,cichm/cookiecutter,agconti/cookiecutter,christabor/cookiecutter,jhermann/cookiecutter,atlassian/cookiecutter,Springerle/cookiecutter,drgarcia1986/cookiecutter,janusnic/cookiecutter,0k/cookiecutter,hackebrot/cookiecutter,ramiroluz/cookiecutter,venumech/cookiecutter,dajose/cookiecutter,cguardia/cookiecutter,takeflight/cookiecutter,audreyr/cookiecutter,stevepiercy/cookiecutter,moi65/cookiecutter,cichm/cookiecutter,dajose/cookiecutter,agconti/cookiecutter,Springerle/cookiecutter,kkujawinski/cookiecutter,nhomar/cookiecutter,ionelmc/cookiecutter,michaeljoseph/cookiecutter,Vauxoo/cookiecutter,ramiroluz/cookiecutter,vintasoftware/cookiecutter,0k/cookiecutter,willingc/cookiecutter,kkujawinski/cookiecutter,pjbull/cookiecutter,tylerdave/cookiecutter,venumech/cookiecutter,pjbull/cookiecutter,jhermann/cookiecutter |
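The deleted tests above are the only specification of expand_abbreviations left in this record, so a compact sketch of the behaviour they pinned down may help. This is a hypothetical re-implementation; the BUILTIN_ABBREVIATIONS name and the function body are invented here, not cookiecutter's actual code.
# Sketch of the behaviour the deleted tests asserted (assumptions noted above).
BUILTIN_ABBREVIATIONS = {'gh': 'https://github.com/{0}.git'}
def expand_abbreviations(template, config):
    abbreviations = dict(BUILTIN_ABBREVIATIONS, **config.get('abbreviations', {}))
    if template in abbreviations:
        return abbreviations[template]
    prefix, sep, rest = template.partition(':')
    if sep and prefix in abbreviations:
        # str.format raises IndexError for '{1}', matching the last test.
        return abbreviations[prefix].format(rest)
    return template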
d2536523770a59ed60bf27e8c0e456a33ca1a804 | billabong/tests/test_main.py | billabong/tests/test_main.py | # Copyright (c) 2015 "Hugo Herter http://hugoherter.com"
#
# This file is part of Billabong.
#
# Billabong is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Test CLI interface."""
import os
from .fixtures import record
assert record
def run(cmd):
"""Helper to test running a CLI command."""
os.system('python -m billabong ' + cmd)
def test_cli(record):
"""Test main supported CLI commands."""
ID = record['id']
run('ls')
run('blobs')
run('info ' + ID)
run('search txt')
run('check')
run('push')
run('pull')
run('echo ' + ID)
run('status')
run('version')
| # Copyright (c) 2015 "Hugo Herter http://hugoherter.com"
#
# This file is part of Billabong.
#
# Billabong is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Test CLI interface."""
import os
from .fixtures import record
assert record
def run(cmd):
"""Helper to test running a CLI command."""
os.system('python -m billabong ' + cmd)
def test_cli(record):
"""Test main supported CLI commands."""
ID = record['id']
run('ls')
run('records')
run('blobs')
run('info ' + ID)
run('info ' + ID + ' --no-color')
run('search txt')
run('check')
run('push')
run('pull')
run('echo ' + ID)
run('status')
run('version')
run('add hello.txt')
| Add test for cli 'add' command | Add test for cli 'add' command
| Python | agpl-3.0 | hoh/Billabong,hoh/Billabong |
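One caveat in the test above: os.system() discards the exit status, so a failing CLI invocation cannot fail the test. A possible hardening, sketched here with the standard library only and not part of the commit:
# Sketch: make a failing CLI call fail the test loudly.
import shlex
import subprocess
def run(cmd):
    """Run a billabong CLI command and raise on a non-zero exit."""
    # check_call raises CalledProcessError if the command fails.
    subprocess.check_call(['python', '-m', 'billabong'] + shlex.split(cmd))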
32ca774aca8fd60a26f6144a98f25fa8b65ad22b | yak/rest_social_auth/serializers.py | yak/rest_social_auth/serializers.py | from django.contrib.auth import get_user_model
from rest_framework import serializers
from yak.rest_user.serializers import SignUpSerializer
User = get_user_model()
class SocialSignUpSerializer(SignUpSerializer):
password = serializers.CharField(required=False, write_only=True)
class Meta:
model = User
fields = ('fullname', 'username', 'email', 'password', 'client_id', 'client_secret')
write_only_fields = ('access_token', 'access_token_secret')
read_only_fields = ('fullname', 'username', 'email', 'client_id', 'client_secret')
| from django.contrib.auth import get_user_model
from rest_framework import serializers
from yak.rest_user.serializers import LoginSerializer
User = get_user_model()
class SocialSignUpSerializer(LoginSerializer):
fullname = serializers.CharField(read_only=True)
username = serializers.CharField(read_only=True)
email = serializers.EmailField(read_only=True)
password = serializers.CharField(required=False, write_only=True)
class Meta:
model = User
fields = ('fullname', 'username', 'email', 'password', 'client_id', 'client_secret')
write_only_fields = ('access_token', 'access_token_secret')
| Update social sign up serializer to avoid new validation on regular sign up | Update social sign up serializer to avoid new validation on regular sign up
| Python | mit | ParableSciences/YAK-server,sventech/YAK-server,yeti/YAK-server,sventech/YAK-server,ParableSciences/YAK-server,yeti/YAK-server |
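The switch from Meta.read_only_fields to explicitly declared fields matters because, in Django REST Framework, Meta.read_only_fields only applies to fields that are not declared on the serializer class; an overridden field must carry read_only=True itself. A minimal illustration with an invented serializer, not project code:
from rest_framework import serializers
class ExampleSerializer(serializers.Serializer):
    # Declared explicitly, so read_only must be set here; listing this
    # field in Meta.read_only_fields would have no effect on it.
    email = serializers.EmailField(read_only=True)
    password = serializers.CharField(required=False, write_only=True)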
9c58f87648034d706ca5df87d7179226e48afa54 | project/profiles/urls.py | project/profiles/urls.py | """
project.profiles URL Configuration
"""
from django.conf.urls import url
from .views import (profile_list_view,
profile_detail_view,
profile_update_view)
urlpatterns = [
url(r'^profiles/$', profile_list_view, name="profile_list"),
url(r'^profiles/update/$', profile_update_view, name="profile_update"),
url(r'^profile/(?P<username>[-\w]+)/$', profile_detail_view, name="profile_detail"),
]
| """
project.profiles URL Configuration
"""
from django.conf.urls import url
from .views import (profile_list_view,
profile_detail_view,
profile_update_view)
urlpatterns = [
url(r'^profiles/$', profile_list_view, name="profile_list"),
url(r'^profiles/update/$', profile_update_view, name="profile_update"),
url(r'^profile/(?P<username>[\w.@+-]+)/$', profile_detail_view, name="profile_detail"),
]
| Use Django's username regex in profile URL | Use Django's username regex in profile URL
| Python | mit | compsci-hfh/app,jonsimington/app,compsci-hfh/app,jonsimington/app |
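A quick standalone check, with invented usernames, of what the widened pattern accepts: Django's default username validator allows letters, digits and @/./+/-/_, parts of which the old [-\w]+ class rejected.
import re
OLD_PATTERN = r'^[-\w]+$'
NEW_PATTERN = r'^[\w.@+-]+$'
for username in ('jon', 'jon.simington', 'jon+test@example.com'):
    print(username,
          bool(re.match(OLD_PATTERN, username)),
          bool(re.match(NEW_PATTERN, username)))
# 'jon' matches both patterns; the dotted and email-style names
# only match the new one.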
a42b6d1faa38f92b21d74c1cf258f4b0e9800401 | search/urls.py | search/urls.py | from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from core.auth import perm
import search.views
urlpatterns = patterns('',
url(r'^document/$', perm('any', search.views.DocumentSearchTemplate), name='search'),
url(r'^document/query/$',perm('any', search.views.DocumentSearchQuery), name='search_documents_query'),
url(r'^image/$', perm('user', search.views.ImageSearchTemplate), name='search_images'),
url(r'^image/query/$', perm('user', search.views.SearchImageQuery), name='search_images_query'),
url(r'^social/$', perm('user', TemplateView, template_name='search/search_social.jinja'), name='search_social'),
url(r'^social/query/$', perm('user', search.views.SearchSocialQuery), name='search_social_query'),
)
| from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from core.auth import perm
import search.views
urlpatterns = patterns('',
url(r'^document/$', perm('any', search.views.DocumentSearchTemplate), name='search'),
url(r'^document/query/$',perm('any', search.views.DocumentSearchQuery), name='search_documents_query'),
url(r'^image/$', perm('loggedin', search.views.ImageSearchTemplate), name='search_images'),
url(r'^image/query/$', perm('loggedin', search.views.SearchImageQuery), name='search_images_query'),
url(r'^social/$', perm('user', TemplateView, template_name='search/search_social.jinja'), name='search_social'),
url(r'^social/query/$', perm('user', search.views.SearchSocialQuery), name='search_social_query'),
)
| Allow any logged-in user to perform image searches. | Allow any logged-in user to perform image searches.
| Python | mit | occrp/id-backend |
afc0ace0767e29f8c2b71ed5ba7f8139e24fc020 | categories/serializers.py | categories/serializers.py | from .models import Category, Keyword, Subcategory
from rest_framework import serializers
class CategorySerializer(serializers.ModelSerializer):
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'comment_required')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
fields = ('pk', 'name')
class KeywordListSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
fields = ('pk', 'name')
class SubcategoryDetailSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
depth = 1
fields = ('pk', 'name', 'category')
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
| from .models import Category, Keyword, Subcategory
from rest_framework import serializers
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
fields = ('pk', 'name')
class KeywordListSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
fields = ('pk', 'name')
class SubcategoryDetailSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
depth = 1
fields = ('pk', 'name', 'category')
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'comment_required', 'subcategories')
| Add reverse relationship serializer to Category | Add reverse relationship serializer to Category
| Python | apache-2.0 | belatrix/BackendAllStars |
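The source='subcategory_set' argument relies on Django's default reverse accessor for a ForeignKey declared without a related_name. The models are not shown in this record, so the following reconstruction is hypothetical:
from django.db import models
class Category(models.Model):
    name = models.CharField(max_length=100)
    weight = models.IntegerField(default=0)
    comment_required = models.BooleanField(default=False)
class Subcategory(models.Model):
    name = models.CharField(max_length=100)
    # No related_name, so the reverse manager is category.subcategory_set,
    # which CategorySerializer exposes as 'subcategories'.
    category = models.ForeignKey(Category)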
8e9889bb9c2d916f61e5e08416a171777f1c6a2e | samples/gpio_write.py | samples/gpio_write.py | import asyncio
import apigpio
LED_GPIO = 21
@asyncio.coroutine
def start_blink(pi, address):
yield from pi.connect(address)
# running this in this order blocks :(
# only in run, when debugging it does not block...
# blocks on set_mode for the second gpio
yield from pi.set_mode(LED_GPIO, apigpio.OUTPUT)
while True:
yield from pi.write(LED_GPIO, 0)
yield from asyncio.sleep(1)
yield from pi.write(LED_GPIO, 1)
yield from asyncio.sleep(1)
if __name__ == '__main__':
loop = asyncio.get_event_loop()
pi = apigpio.Pi(loop)
address = ('192.168.1.3', 8888)
loop.run_until_complete(start_blink(pi, address))
| import asyncio
import apigpio
LED_GPIO = 21
@asyncio.coroutine
def start_blink(pi, address):
yield from pi.connect(address)
yield from pi.set_mode(LED_GPIO, apigpio.OUTPUT)
while True:
yield from pi.write(LED_GPIO, 0)
yield from asyncio.sleep(1)
yield from pi.write(LED_GPIO, 1)
yield from asyncio.sleep(1)
if __name__ == '__main__':
loop = asyncio.get_event_loop()
pi = apigpio.Pi(loop)
address = ('192.168.1.3', 8888)
loop.run_until_complete(start_blink(pi, address))
| Remove wrong comments on samples. | Remove wrong comments on samples.
| Python | mit | PierreRust/apigpio |
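For contrast, the same loop in async/await syntax (Python 3.5+). This is an untested sketch that assumes the apigpio calls behave exactly as in the sample above; it is not part of the commit.
import asyncio
import apigpio
LED_GPIO = 21
async def start_blink(pi, address):
    await pi.connect(address)
    await pi.set_mode(LED_GPIO, apigpio.OUTPUT)
    while True:
        await pi.write(LED_GPIO, 0)
        await asyncio.sleep(1)
        await pi.write(LED_GPIO, 1)
        await asyncio.sleep(1)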
8373afd33efc7594d378e819705fe38f6c1cca57 | src/nyc_trees/apps/core/templatetags/utils.py | src/nyc_trees/apps/core/templatetags/utils.py | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import re
from django import template
from django.conf import settings
register = template.Library()
_remove_slash_re = re.compile(r'/+')
def _urljoin(*args):
"""Joins relative URLs, collapsing consecutive '/'"""
url = "/".join(args)
return _remove_slash_re.sub('/', url)
@register.filter
def static_url(static_file):
if settings.DEBUG:
return _urljoin(settings.STATIC_URL, static_file)
static_file_mapping = settings.STATIC_FILES_MAPPING
if static_file not in static_file_mapping:
raise Exception('Static file %s not found in rev-manifest.json, '
'did you forget to run "npm run build"?' % static_file)
return _urljoin(settings.STATIC_URL, static_file_mapping[static_file])
| # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import re
from django import template
from django.conf import settings
register = template.Library()
_remove_slash_re = re.compile(r'/+')
def _urljoin(*args):
"""Joins relative URLs, collapsing consecutive '/'"""
url = "/".join(args)
return _remove_slash_re.sub('/', url)
@register.filter
def static_url(static_file):
if settings.DEBUG:
return _urljoin(settings.STATIC_URL, static_file)
static_file_mapping = settings.STATIC_FILES_MAPPING
if static_file not in static_file_mapping:
# !!! WARNING !!! this may cause your templates to silently fail
# If template A includes template B and template B has uses this
# templatetag and results in this exception, template B will be
# rendered blank inside of template A, instead of crashing.
raise Exception('Static file %s not found in rev-manifest.json, '
'did you forget to run "npm run build"?' % static_file)
return _urljoin(settings.STATIC_URL, static_file_mapping[static_file])
| Add warning message to tricky template tag | Add warning message to tricky template tag
| Python | agpl-3.0 | maurizi/nyc-trees,azavea/nyc-trees,maurizi/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees,kdeloach/nyc-trees,kdeloach/nyc-trees,maurizi/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees,maurizi/nyc-trees,azavea/nyc-trees |
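The _urljoin helper above is subtle enough to deserve a standalone demonstration; the hashed file name below is invented.
import re
_remove_slash_re = re.compile(r'/+')
def _urljoin(*args):
    # Joining '/static/' and '/js/app-d41d8c.js' yields '/static///js/...',
    # which the regex collapses back to single slashes.
    return _remove_slash_re.sub('/', '/'.join(args))
assert _urljoin('/static/', '/js/app-d41d8c.js') == '/static/js/app-d41d8c.js'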
b25e28c4ee5b81436547ec1cde8d7f471352f08e | config/production-example.py | config/production-example.py | # Exemplary production configuration
import os
# Enable this if you want a tool like Sentry
# handle exceptions rather than Flask.
PROPAGATE_EXCEPTIONS = False
# Set a custom secret key before running in production!
# To generate one:
# $ python -c 'import secrets; print(secrets.token_hex())'
#SECRET_KEY = ''
# TODO: Adjust `SQLALCHEMY_DATABASE_URI`!
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://byceps:boioioing@127.0.0.1/byceps'
REDIS_URL = 'redis://127.0.0.1:6379/0'
# Or, if you want to access Redis via unix socket instead:
#REDIS_URL = 'unix:///var/run/redis/redis.sock?db=0'
APP_MODE = os.environ.get('APP_MODE')
SITE_ID = os.environ.get('SITE_ID')
| # Exemplary production configuration
import os
# Enable this if you want a tool like Sentry
# handle exceptions rather than Flask.
PROPAGATE_EXCEPTIONS = False
# Set a custom secret key before running in production!
# To generate one:
# $ byceps generate-secret-key
#SECRET_KEY = ''
# TODO: Adjust `SQLALCHEMY_DATABASE_URI`!
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://byceps:boioioing@127.0.0.1/byceps'
REDIS_URL = 'redis://127.0.0.1:6379/0'
# Or, if you want to access Redis via unix socket instead:
#REDIS_URL = 'unix:///var/run/redis/redis.sock?db=0'
APP_MODE = os.environ.get('APP_MODE')
SITE_ID = os.environ.get('SITE_ID')
| Update note in config example on how to generate secret key | Update note in config example on how to generate secret key
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps |
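For reference, the secret the new note asks for can still be produced with the standard library alone; the sample output below is an invented placeholder.
import secrets
# token_hex() defaults to 32 random bytes, printed as 64 hex characters.
print(secrets.token_hex())  # e.g. '3d5a...'; use the output as SECRET_KEY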
ffde5305a2182e566384887d51e4fde90adc9908 | runtests.py | runtests.py | #!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
from django.test.utils import get_runner
if __name__ == "__main__":
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.test_settings'
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner()
failures = test_runner.run_tests(["tests"])
sys.exit(bool(failures))
| #!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
from django.test.utils import get_runner
if __name__ == "__main__":
tests = "tests" if len(sys.argv) == 1 else sys.argv[1]
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.test_settings'
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner()
failures = test_runner.run_tests([tests])
sys.exit(bool(failures))
| Make it possible to run individual tests. | Tests: Make it possible to run individual tests.
| Python | agpl-3.0 | etesync/journal-manager |
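Usage implied by the new argument handling; the test labels below are invented examples, not names from this repository.
# $ python runtests.py                                # whole "tests" package
# $ python runtests.py tests.test_collection          # a single module
# $ python runtests.py tests.test_collection.TestCol  # a single test case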
d34837d2b059ac72d7db85aac2effee83e8cb41c | runtests.py | runtests.py | #!/usr/bin/env python
try:
import unittest2 as unittest
except ImportError:
import unittest
import atexit
import gc
gc.set_debug(gc.DEBUG_UNCOLLECTABLE)
@atexit.register
def report_uncollectable():
print('uncollectable objects')
for obj in gc.garbage:
print(obj)
if hasattr(obj, '__dict__'):
print(obj.__dict__)
for ref in gc.get_referrers(obj):
print("referrer:", ref)
print('---')
import pymysql.tests
unittest.main(pymysql.tests)
| #!/usr/bin/env python
try:
import unittest2 as unittest
except ImportError:
import unittest
import sys
if not hasattr(sys, 'pypy_version_info'):
import atexit
import gc
gc.set_debug(gc.DEBUG_UNCOLLECTABLE)
@atexit.register
def report_uncollectable():
print('uncollectable objects')
for obj in gc.garbage:
print(obj)
if hasattr(obj, '__dict__'):
print(obj.__dict__)
for ref in gc.get_referrers(obj):
print("referrer:", ref)
print('---')
import pymysql.tests
unittest.main(pymysql.tests)
| Fix tox failure on PyPy | Fix tox failure on PyPy
| Python | mit | PyMySQL/Tornado-MySQL,aio-libs/aiomysql,lzedl/PyMySQL,methane/PyMySQL,boneyao/PyMySQL,pymysql/pymysql,pulsar314/Tornado-MySQL,yeyinzhu3211/PyMySQL,nju520/PyMySQL,eibanez/PyMySQL,jwjohns/PyMySQL,jheld/PyMySQL,modulexcite/PyMySQL,anson-tang/PyMySQL,wraziens/PyMySQL,xjzhou/PyMySQL,MartinThoma/PyMySQL,Geoion/Tornado-MySQL,lzedl/PyMySQL,eibanez/PyMySQL,PyMySQL/PyMySQL,NunoEdgarGub1/PyMySQL,Ting-y/PyMySQL,xjzhou/PyMySQL,mosquito/Tornado-MySQL,DashaChuk/PyMySQL,yeyinzhu3211/PyMySQL,wraziens/PyMySQL |
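An equivalent, arguably clearer interpreter check (a sketch, not from the commit): platform.python_implementation() names the interpreter directly instead of probing for sys.pypy_version_info.
import platform
if platform.python_implementation() != 'PyPy':
    # gc.garbage is not populated the same way on PyPy, so the debug
    # hook is only useful on CPython.
    import gc
    gc.set_debug(gc.DEBUG_UNCOLLECTABLE)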
0362731366e9f3f1bc6d4ddb7a36ea863b46baed | tests/test_invites.py | tests/test_invites.py | def test_find_or_create_invite(logged_rocket):
rid = 'GENERAL'
find_or_create_invite = logged_rocket.find_or_create_invite(rid=rid, days=10, max_uses=20).json()
assert find_or_create_invite.get('success')
assert find_or_create_invite.get('days') == 10
assert find_or_create_invite.get('maxUses') == 20
def test_list_invites(logged_rocket):
list_invites = logged_rocket.list_invites().json()
assert isinstance(list_invites, list)
| def test_find_or_create_invite(logged_rocket):
rid = 'GENERAL'
find_or_create_invite = logged_rocket.find_or_create_invite(rid=rid, days=7, max_uses=5).json()
assert find_or_create_invite.get('success')
assert find_or_create_invite.get('days') == 7
assert find_or_create_invite.get('maxUses') == 5
def test_list_invites(logged_rocket):
list_invites = logged_rocket.list_invites().json()
assert isinstance(list_invites, list)
| Change the test values to valid values. | Change the test values to valid values.
The API only accepts the same values that are available in the client:
0, 1, 7, 15 or 30 days
0, 1, 5, 10, 25, 50 or 100 uses.
| Python | mit | jadolg/rocketchat_API |
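Given the constraint in the message above, a parametrized variant could exercise every accepted pair. This sketch assumes the same logged_rocket fixture; the pairing of days with max_uses is invented.
import pytest
@pytest.mark.parametrize('days,max_uses',
                         [(0, 0), (1, 1), (7, 5), (15, 25), (30, 100)])
def test_invite_accepted_values(logged_rocket, days, max_uses):
    result = logged_rocket.find_or_create_invite(
        rid='GENERAL', days=days, max_uses=max_uses).json()
    assert result.get('success')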
16b2de5a1c4965b1e3a2cb96c6ea3bd847e85c95 | hxl/commands/hxlvalidate.py | hxl/commands/hxlvalidate.py | """
Command function to schema-validate a HXL dataset.
David Megginson
November 2014
Can use a whitelist of HXL tags, a blacklist, or both.
Usage:
import sys
from hxl.scripts.hxlvalidate import hxlvalidate
hxlvalidate(sys.stdin, sys.stdout, open('MySchema.csv', 'r'))
License: Public Domain
Documentation: http://hxlstandard.org
"""
import sys
import argparse
from hxl.parser import HXLReader
from hxl.schema import loadHXLSchema
def hxlvalidate(input, output=sys.stdout, schema_input=None):
parser = HXLReader(input)
schema = loadHXLSchema(schema_input)
schema.validate(parser)
# end
| """
Command function to schema-validate a HXL dataset.
David Megginson
November 2014
Can use a whitelist of HXL tags, a blacklist, or both.
Usage:
import sys
from hxl.scripts.hxlvalidate import hxlvalidate
hxlvalidate(sys.stdin, sys.stdout, open('MySchema.csv', 'r'))
License: Public Domain
Documentation: http://hxlstandard.org
"""
import sys
from hxl.parser import HXLReader
from hxl.schema import loadHXLSchema
def hxlvalidate(input=sys.stdin, output=sys.stderr, schema_input=None):
parser = HXLReader(input)
schema = loadHXLSchema(schema_input)
return schema.validate(parser)
# end
| Return result of validation from the command script. | Return result of validation from the command script.
| Python | unlicense | HXLStandard/libhxl-python,HXLStandard/libhxl-python |
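What the returned value enables, sketched under the assumption that validate() returns a truthy result on success; 'MySchema.csv' is the example file from the record's own docstring.
import sys
from hxl.commands.hxlvalidate import hxlvalidate
if __name__ == '__main__':
    # Exit non-zero on invalid data so shell pipelines can react.
    if not hxlvalidate(sys.stdin, schema_input=open('MySchema.csv', 'r')):
        sys.exit(1)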
0aec3c1f935bf3faaa959f33025b0c40f9b3efc1 | tests/test_yamlmod.py | tests/test_yamlmod.py | import os
import sys
from nose.tools import *
try:
from importlib import reload
except ImportError:
pass
def setup_yamlmod():
import yamlmod
reload(yamlmod)
def teardown_yamlmod():
import yamlmod
for hook in sys.meta_path:
if isinstance(hook, yamlmod.YamlImportHook):
sys.meta_path.remove(hook)
break
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_import_installs_hook():
import yamlmod
hooks = []
for hook in sys.meta_path:
if isinstance(hook, yamlmod.YamlImportHook):
hooks.append(hook)
eq_(len(hooks), 1, 'did not find exactly one hook')
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_import_fixture():
import fixture
eq_(fixture.debug, True)
eq_(fixture.domain, 'example.com')
eq_(fixture.users, ['alice', 'bob', 'cathy'])
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_hidden_attributes():
import fixture
eq_(fixture.__name__, 'fixture')
eq_(fixture.__file__, os.path.join(os.path.dirname(__file__), 'fixture.yml'))
| import os
import sys
from nose.tools import *
try:
from importlib import reload
except ImportError:
try:
from imp import reload
except ImportError:
pass
def setup_yamlmod():
import yamlmod
reload(yamlmod)
def teardown_yamlmod():
import yamlmod
for hook in sys.meta_path:
if isinstance(hook, yamlmod.YamlImportHook):
sys.meta_path.remove(hook)
break
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_import_installs_hook():
import yamlmod
hooks = []
for hook in sys.meta_path:
if isinstance(hook, yamlmod.YamlImportHook):
hooks.append(hook)
eq_(len(hooks), 1, 'did not find exactly one hook')
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_import_fixture():
import fixture
eq_(fixture.debug, True)
eq_(fixture.domain, 'example.com')
eq_(fixture.users, ['alice', 'bob', 'cathy'])
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_hidden_attributes():
import fixture
eq_(fixture.__name__, 'fixture')
eq_(fixture.__file__, os.path.join(os.path.dirname(__file__), 'fixture.yml'))
| Fix tests on Python 3.3 | Fix tests on Python 3.3
| Python | mit | sciyoshi/yamlmod |
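The nested try/except above targets three interpreter families. An annotated sketch of the same fallback, for reference:
try:
    from importlib import reload   # Python 3.4+
except ImportError:
    try:
        from imp import reload     # Python 3.0-3.3
    except ImportError:
        pass                       # Python 2: reload() is a builtin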
08542b47b127d6bcf128bdedb5f25956f909784e | website_snippet_anchor/__openerp__.py | website_snippet_anchor/__openerp__.py | # -*- coding: utf-8 -*-
# © 2015 Antiun Ingeniería S.L. - Jairo Llopis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Set Snippet's Anchor",
"summary": "Allow to reach a concrete section in the page",
"version": "8.0.1.0.0",
"category": "Website",
"website": "http://www.antiun.com",
"author": "Antiun Ingeniería S.L., Odoo Community Association (OCA)",
"license": "AGPL-3",
"application": False,
"installable": True,
"external_dependencies": {
"python": [],
"bin": [],
},
"depends": [
"website",
],
"data": [
"views/assets.xml",
"views/snippets.xml",
],
}
| # -*- coding: utf-8 -*-
# © 2015 Antiun Ingeniería S.L. - Jairo Llopis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Set Snippet's Anchor",
"summary": "Allow to reach a concrete section in the page",
"version": "8.0.1.0.0",
"category": "Website",
"website": "http://www.antiun.com",
"author": "Antiun Ingeniería S.L., Odoo Community Association (OCA)",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"website",
],
"data": [
"views/assets.xml",
"views/snippets.xml",
],
}
| Remove unused keys from manifest. | Remove unused keys from manifest.
| Python | agpl-3.0 | pedrobaeza/website,brain-tec/website,LasLabs/website,gfcapalbo/website,gfcapalbo/website,acsone/website,LasLabs/website,LasLabs/website,open-synergy/website,pedrobaeza/website,brain-tec/website,pedrobaeza/website,nuobit/website,nuobit/website,nuobit/website,gfcapalbo/website,Endika/website,pedrobaeza/website,Yajo/website,gfcapalbo/website,Endika/website,open-synergy/website,Antiun/website,kaerdsar/website,open-synergy/website,open-synergy/website,brain-tec/website,Yajo/website,acsone/website,nuobit/website,acsone/website,kaerdsar/website,Endika/website,acsone/website,LasLabs/website,Antiun/website,Endika/website,Yajo/website,Antiun/website,kaerdsar/website,brain-tec/website,Yajo/website,Antiun/website |
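For context on the removal above: an empty external_dependencies block carries no information, so it is safe to drop. A manifest that genuinely needs the key would look something like this; the dependency names are invented example values.
# Hypothetical manifest fragment showing when the removed keys earn a place.
EXAMPLE_MANIFEST_FRAGMENT = {
    "external_dependencies": {
        "python": ["lxml"],        # importable Python modules required
        "bin": ["wkhtmltopdf"],    # executables that must be on PATH
    },
}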