commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos |
---|---|---|---|---|---|---|---|---|---|
4283035101ec8423126c340423be74d73f9b4184 | setup.py | setup.py | from distutils.core import setup
setup(
name = 'pybenchmark',
packages = ['pybenchmark'], # this must be the same as the name above
version = '0.0.3',
description = 'A benchmark utility used in performance tests.',
author = 'Eugene Duboviy',
author_email = 'eugene.dubovoy@gmail.com',
url = 'https://github.com/duboviy/pybenchmark', # use the URL to the github repo
download_url = 'https://github.com/duboviy/pybenchmark/tarball/0.0.3', # I'll explain this in a second
keywords = ['benchmark', 'performance', 'testing'], # arbitrary keywords
classifiers = [],
)
| from distutils.core import setup
setup(
name = 'pybenchmark',
packages = ['pybenchmark'], # this must be the same as the name above
version = '0.0.4',
description = 'A benchmark utility used in performance tests.',
author = 'Eugene Duboviy',
author_email = 'eugene.dubovoy@gmail.com',
url = 'https://github.com/duboviy/pybenchmark', # use the URL to the github repo
download_url = 'https://github.com/duboviy/pybenchmark/tarball/0.0.4', # I'll explain this in a second
keywords = ['benchmark', 'performance', 'testing'], # arbitrary keywords
classifiers = [],
)
| Add a new version to put repo on PyPI | Add a new version to put repo on PyPI
| Python | mit | duboviy/pybenchmark |
39e9b81fb2ebbe6da4b8056678834bb593205ccb | setup.py | setup.py | from setuptools import setup
setup(
name='crm114',
version='2.0.2',
author='Brian Cline',
author_email='brian.cline@gmail.com',
description=('Python wrapper classes for the CRM-114 Discriminator '
'(http://crm114.sourceforge.net/)'),
license = 'MIT',
keywords = 'crm114 text analysis classifier',
url = 'http://packages.python.org/crm114',
packages=['crm114'],
long_description='See README.md for full details, or '
'https://github.com/briancline/crm114-python'
'/blob/v2.0.1/README.md.',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Adaptive Technologies',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing',
],
)
| from setuptools import setup
VERSION = '2.0.2'
VERSION_TAG = 'v%s' % VERSION
README_URL = ('https://github.com/briancline/crm114-python'
'/blob/%s/README.md' % VERSION_TAG)
setup(
name='crm114',
version=VERSION,
author='Brian Cline',
author_email='brian.cline@gmail.com',
description=('Python wrapper classes for the CRM-114 Discriminator '
'(http://crm114.sourceforge.net/)'),
license = 'MIT',
keywords = 'crm114 text analysis classifier kubrick',
url = 'http://packages.python.org/crm114',
packages=['crm114'],
long_description='See README.md for full details, or %s.' % README_URL,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Adaptive Technologies',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing',
],
)
| Use globals for major bits of package data | Use globals for major bits of package data
| Python | mit | briancline/crm114-python |
9549f70bf6c276f2124682e3f297caeb9c866770 | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Setup file for easy installation."""
from setuptools import setup
import glob
from spec_cleaner import __version__
setup(
name='spec_cleaner',
description='RPM .spec files cleaner',
long_description='Command-line tool for cleaning various formatting' +
'errors in RPM .spec files',
url='https://github.com/openSUSE/spec-cleaner',
download_url='https://github.com/openSUSE/spec-cleaner',
version=__version__,
author='Tomáš Chvátal',
author_email='tchvatal@suse.cz',
maintainer='Tomáš Chvátal',
maintainer_email='tchvatal@suse.cz',
license='License :: OSI Approved :: BSD License',
platforms=['Linux'],
keywords=['SUSE', 'RPM', '.spec', 'cleaner'],
tests_require=['mock', 'nose'],
test_suite="nose.collector",
packages=['spec_cleaner'],
data_files=[('/usr/lib/obs/service/', glob.glob('obs/*')),
('/usr/share/spec-cleaner/', glob.glob('data/*')),
],
entry_points={
'console_scripts': ['spec-cleaner = spec_cleaner:main']},
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Setup file for easy installation."""
from setuptools import setup
import glob
from spec_cleaner import __version__
setup(
name='spec_cleaner',
description='RPM .spec files cleaner',
long_description='Command-line tool for cleaning various formatting ' +
'errors in RPM .spec files',
url='https://github.com/openSUSE/spec-cleaner',
download_url='https://github.com/openSUSE/spec-cleaner',
version=__version__,
author='Tomáš Chvátal',
author_email='tchvatal@suse.cz',
maintainer='Tomáš Chvátal',
maintainer_email='tchvatal@suse.cz',
license='License :: OSI Approved :: BSD License',
platforms=['Linux'],
keywords=['SUSE', 'RPM', '.spec', 'cleaner'],
tests_require=['mock', 'nose'],
test_suite="nose.collector",
packages=['spec_cleaner'],
data_files=[('/usr/lib/obs/service/', glob.glob('obs/*')),
('/usr/share/spec-cleaner/', glob.glob('data/*')),
],
entry_points={
'console_scripts': ['spec-cleaner = spec_cleaner:main']},
)
| Fix typo in longdesc of the package | Fix typo in longdesc of the package
| Python | bsd-3-clause | plusky/spec-cleaner,plusky/spec-cleaner,plusky/spec-cleaner,plusky/spec-cleaner,plusky/spec-cleaner |
0d1ec7d24e3b6272ccde8a2b02af35a29db145ab | setup.py | setup.py | import os
import setuptools
setuptools.setup(
name='lmj.sim',
version='0.0.2',
namespace_packages=['lmj'],
packages=setuptools.find_packages(),
author='Leif Johnson',
author_email='leif@leifjohnson.net',
description='Yet another OpenGL-with-physics simulation framework',
long_description=open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'README.md')).read(),
license='MIT',
url='http://github.com/lmjohns3/py-sim/',
keywords=('simulation '
'physics '
'ode '
'visualization '
),
install_requires=['lmj.cli', 'numpy', 'glumpy', 'PyOpenGL', 'PyODE'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
)
| import os
import setuptools
setuptools.setup(
name='lmj.sim',
version='0.0.2',
namespace_packages=['lmj'],
packages=setuptools.find_packages(),
author='Leif Johnson',
author_email='leif@leifjohnson.net',
description='Yet another OpenGL-with-physics simulation framework',
long_description=open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'README.md')).read(),
license='MIT',
url='http://github.com/lmjohns3/py-sim/',
keywords=('simulation '
'physics '
'ode '
'visualization '
),
install_requires=['lmj.cli', 'numpy', 'glumpy', 'PyOpenGL', 'Open-Dynamics-Engine'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
)
| Change dependency from PyODE to bindings included with ODE source. | Change dependency from PyODE to bindings included with ODE source.
| Python | mit | EmbodiedCognition/pagoda,EmbodiedCognition/pagoda |
eea8431d5f63fa6245ade4f66f787e62b8ea8b85 | setup.py | setup.py | import sys
from setuptools import setup, find_packages, Command
from distutils import log
setup(
name='diffenator',
version='0.1.0',
author="Google Fonts Project Authors",
description="Font regression tester for Google Fonts",
url="https://github.com/googlefonts/diffenator",
license="Apache Software License 2.0",
package_dir={"": "Lib"},
packages=find_packages("Lib"),
entry_points={
"console_scripts": [
"diffenator = diffenator.__main__:main",
"dumper = diffenator.dumper:main",
],
},
install_requires=[
"fonttools>=3.4.0",
],
)
| import sys
from setuptools import setup, find_packages, Command
from distutils import log
setup(
name='fontdiffenator',
version='0.1.0',
author="Google Fonts Project Authors",
description="Font regression tester for Google Fonts",
url="https://github.com/googlefonts/diffenator",
license="Apache Software License 2.0",
package_dir={"": "Lib"},
packages=find_packages("Lib"),
entry_points={
"console_scripts": [
"diffenator = diffenator.__main__:main",
"dumper = diffenator.dumper:main",
],
},
install_requires=[
"fonttools>=3.4.0",
],
)
| Change package name to fontdiffenator | Change package name to fontdiffenator
| Python | apache-2.0 | googlefonts/fontdiffenator,googlefonts/fontdiffenator |
eeace4d8c5c787a25c67b2b6bb2916582666ef82 | setup.py | setup.py | #!/usr/bin/env python
import chevron.metadata
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
try:
import pypandoc
readme = pypandoc.convert('README.md', 'rest')
except (ImportError, RuntimeError):
print('\n\n!!!\npypandoc not loaded\n!!!\n')
readme = ''
setup(name='chevron',
version=chevron.metadata.version,
license='MIT',
description='Mustache templating language renderer',
long_description=readme,
author='noah morrison',
author_email='noah@morrison.ph',
url='https://github.com/noahmorrison/chevron',
packages=['chevron'],
entry_points={
'console_scripts': ['chevron=chevron:cli_main']
},
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Processing :: Markup'
]
)
| #!/usr/bin/env python
import chevron.metadata
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.md') as f:
readme = f.read()
setup(name='chevron',
version=chevron.metadata.version,
license='MIT',
description='Mustache templating language renderer',
long_description=readme,
author='noah morrison',
author_email='noah@morrison.ph',
url='https://github.com/noahmorrison/chevron',
packages=['chevron'],
entry_points={
'console_scripts': ['chevron=chevron:cli_main']
},
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Processing :: Markup'
]
)
| Remove pandoc, PyPi accepts markdown now | Remove pandoc, PyPi accepts markdown now
| Python | mit | noahmorrison/chevron,noahmorrison/chevron |
73de4216c697ff79cb256a1b8c3be464e6355370 | examples/flask_app/src/backend/main.py | examples/flask_app/src/backend/main.py | from threading import Thread, Lock
import logging
import webview
from time import sleep
from server import run_server
server_lock = Lock()
logger = logging.getLogger(__name__)
def url_ok(url, port):
# Use httplib on Python 2
try:
from http.client import HTTPConnection
except ImportError:
from httplib import HTTPConnection
try:
conn = HTTPConnection(url, port)
conn.request('GET', '/')
r = conn.getresponse()
return r.status == 200
except:
logger.exception('Server not started')
return False
if __name__ == '__main__':
logger.debug('Starting server')
t = Thread(target=run_server)
t.daemon = True
t.start()
logger.debug('Checking server')
while not url_ok('127.0.0.1', 23948):
sleep(1)
logger.debug('Server started')
window = webview.create_window('My first pywebview application', 'http://127.0.0.1:23948')
webview.start(debug=True)
| import logging
import webview
from contextlib import redirect_stdout
from io import StringIO
from threading import Thread, Lock
from time import sleep
from server import run_server
server_lock = Lock()
logger = logging.getLogger(__name__)
def url_ok(url, port):
# Use httplib on Python 2
try:
from http.client import HTTPConnection
except ImportError:
from httplib import HTTPConnection
try:
conn = HTTPConnection(url, port)
conn.request('GET', '/')
r = conn.getresponse()
return r.status == 200
except:
logger.exception('Server not started')
return False
if __name__ == '__main__':
stream = StringIO()
with redirect_stdout(stream):
logger.debug('Starting server')
t = Thread(target=run_server)
t.daemon = True
t.start()
logger.debug('Checking server')
while not url_ok('127.0.0.1', 23948):
sleep(1)
logger.debug('Server started')
window = webview.create_window('My first pywebview application', 'http://127.0.0.1:23948')
webview.start(debug=True)
| Fix Flask example to allow freezing | Fix Flask example to allow freezing
| Python | bsd-3-clause | r0x0r/pywebview,r0x0r/pywebview,r0x0r/pywebview,r0x0r/pywebview,r0x0r/pywebview |
dea503e03a7c18c256d902b0b6ad3cb66a7ce9a2 | examples/flexure/example_point_load.py | examples/flexure/example_point_load.py | #! /usr/bin/env python
"""
"""
from landlab import RasterModelGrid
from landlab.components.flexure import Flexure
def add_load_to_middle_of_grid(grid, load):
shape = grid.shape
load_array = grid.field_values(
"node", "lithosphere__overlying_pressure_increment"
).view()
load_array.shape = shape
load_array[shape[0] / 2, shape[1] / 2] = load
def main():
(n_rows, n_cols) = (100, 100)
(dy, dx) = (10e3, 10e3)
grid = RasterModelGrid(n_rows, n_cols, dx)
flex = Flexure(grid, method="flexure")
add_load_to_middle_of_grid(grid, 1e7)
flex.update()
grid.imshow(
"node",
"lithosphere_surface__elevation_increment",
symmetric_cbar=True,
show=True,
)
if __name__ == "__main__":
main()
| #! /usr/bin/env python
"""
"""
from landlab import RasterModelGrid
from landlab.components.flexure import Flexure
def add_load_to_middle_of_grid(grid, load):
shape = grid.shape
load_array = grid.field_values(
"node", "lithosphere__overlying_pressure_increment"
).view()
load_array.shape = shape
load_array[shape[0] / 2, shape[1] / 2] = load
def main():
(n_rows, n_cols) = (100, 100)
spacing = (10e3, 10e3)
grid = RasterModelGrid(n_rows, n_cols, spacing[1])
flex = Flexure(grid, method="flexure")
add_load_to_middle_of_grid(grid, 1e7)
flex.update()
grid.imshow(
"node",
"lithosphere_surface__elevation_increment",
symmetric_cbar=True,
show=True,
)
if __name__ == "__main__":
main()
| Fix F841: local variable is assigned to but never used. | Fix F841: local variable is assigned to but never used.
| Python | mit | amandersillinois/landlab,cmshobe/landlab,landlab/landlab,cmshobe/landlab,amandersillinois/landlab,cmshobe/landlab,landlab/landlab,landlab/landlab |
584956dce7cd607c6cb0d24d360d65d1c0be7005 | lib/pylprof/dump-stats.py | lib/pylprof/dump-stats.py | import json
stats = lp.get_stats()
unit = stats.unit
results = {}
for function, timings in stats.timings.iteritems():
module, line, fname = function
results[module] = {}
for sample in timings:
linenumber, ncalls, timing = sample
if not results[module].get(linenumber):
results[module][linenumber] = []
results[module][linenumber].append({
'name' : '',
'timing' : [ncalls, timing*unit, timing*unit*ncalls]
})
jsondump = json.dumps(results)
print('statsstart' + jsondump + 'statsend')
sys.stdout.flush()
exit()
| import json
import sys
from collections import defaultdict
stats = lp.get_stats()
unit = stats.unit
results = {}
for loc, timings in stats.timings.iteritems():
module, line, fname = loc
if not results.get(module):
results[module] = defaultdict(list)
for sample in timings:
linenumber, ncalls, timing = sample
results[module][linenumber].append({
'timing' : [ncalls, timing*unit, timing*unit*ncalls]
})
statsdump = json.dumps(results)
print('statsstart{0}statsend'.format(statsdump))
sys.stdout.flush()
exit()
| Fix bug when profiling multiple fcts per module | [pylprof] Fix bug when profiling multiple fcts per module
| Python | mit | iddl/pprofile,iddl/pprofile |
1c011be6d217ab8a62ed2f5c7f920d543519be2f | app/api_v1/__init__.py | app/api_v1/__init__.py | """This module creates the api BluePrint."""
from flask import BluePrint
api = BluePrint('api', __name__)
| """This module creates the api BluePrint."""
from flask import Blueprint
api = Blueprint('api', __name__)
| Fix typo in import message. | [Bug] Fix typo in import message.
| Python | mit | andela-akiura/bucketlist |
2b58374504242d4019fde208296802fe4fb1c4b3 | Lib/__init__.py | Lib/__init__.py | """\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy import *
del fft, ifft, info
import numpy
__all__.extend(filter(lambda x: x not in ['fft','ifft','info'], numpy.__all__))
del numpy
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
| """\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
| Remove auto include of numpy namespace. | Remove auto include of numpy namespace.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@1522 d6536bca-fef9-0310-8506-e4c0a848fbcf
| Python | bsd-3-clause | scipy/scipy-svn,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,scipy/scipy-svn,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,scipy/scipy-svn,scipy/scipy-svn,jasonmccampbell/scipy-refactor |
bdfb9f519de343a8802a66074f9c0af058c42978 | scripts/master/factory/dart/channels.py | scripts/master/factory/dart/channels.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.8', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.9', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| Make stable builders pull from the 1.9 branch | Make stable builders pull from the 1.9 branch
TBR=kasperl@google.com
BUG=
Review URL: https://codereview.chromium.org/1029093006
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@294541 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build |
97529500f335ee1f75a6229c1c313bc7ff2d68ef | satchmo/apps/satchmo_store/shop/management/commands/satchmo_copy_static.py | satchmo/apps/satchmo_store/shop/management/commands/satchmo_copy_static.py | from django.core.management.base import NoArgsCommand
import os
import shutil
class Command(NoArgsCommand):
help = "Copy the satchmo static directory and files to the local project."
def handle_noargs(self, **options):
import satchmo_store
static_src = os.path.join(satchmo_store.__path__[0],'static')
static_dest = os.path.join(os.getcwd(), 'static')
if os.path.exists(static_dest):
print "Static directory exists. You must manually copy the files you need."
else:
shutil.copytree(static_src, static_dest)
for root, dirs, files in os.walk(static_dest):
if '.svn' in dirs:
shutil.rmtree(os.path.join(root,'.svn'), True)
print "Copied %s to %s" % (static_src, static_dest)
| from django.core.management.base import NoArgsCommand
import os
import shutil
class Command(NoArgsCommand):
help = "Copy the satchmo static directory and files to the local project."
def handle_noargs(self, **options):
import satchmo_store
static_src = os.path.join(satchmo_store.__path__[0],'../../static')
static_dest = os.path.join(os.getcwd(), 'static')
if os.path.exists(static_dest):
print "Static directory exists. You must manually copy the files you need."
else:
shutil.copytree(static_src, static_dest)
for root, dirs, files in os.walk(static_dest):
if '.svn' in dirs:
shutil.rmtree(os.path.join(root,'.svn'), True)
print "Copied %s to %s" % (static_src, static_dest)
| Change the static copy command so it works with new layout. | Change the static copy command so it works with new layout.
--HG--
extra : convert_revision : svn%3Aa38d40e9-c014-0410-b785-c606c0c8e7de/satchmo/trunk%401887
| Python | bsd-3-clause | dokterbob/satchmo,Ryati/satchmo,twidi/satchmo,ringemup/satchmo,ringemup/satchmo,Ryati/satchmo,twidi/satchmo,dokterbob/satchmo |
3c58a84a7eda661d48be72abda9001649ebe3ab0 | app_server/clparser.py | app_server/clparser.py | """This module contains all the logic required to parse the app
server's command line."""
#-------------------------------------------------------------------------------
import re
import logging
import optparse
import clparserutil
#-------------------------------------------------------------------------------
class CommandLineParser(optparse.OptionParser):
def __init__(self):
optparse.OptionParser.__init__(
self,
"usage: %prog [options]",
option_class=clparserutil.Option)
self.add_option(
"--log",
action="store",
dest="logging_level",
default=logging.ERROR,
type="logginglevel",
help="logging level [DEBUG,INFO,WARNING,ERROR,CRITICAL,FATAL] - default = ERRROR" )
self.add_option(
"--port",
action="store",
dest="port",
default=8080,
type=int,
help="port" )
#------------------------------------------------------------------- End-of-File
| """This module contains all the logic required to parse the app
server's command line."""
#-------------------------------------------------------------------------------
import re
import logging
import optparse
import clparserutil
#-------------------------------------------------------------------------------
class CommandLineParser(optparse.OptionParser):
def __init__(self):
optparse.OptionParser.__init__(
self,
"usage: %prog [options]",
option_class=clparserutil.Option)
self.add_option(
"--log",
action="store",
dest="logging_level",
default=logging.ERROR,
type="logginglevel",
help="logging level [DEBUG,INFO,WARNING,ERROR,CRITICAL,FATAL] - default = ERRROR" )
self.add_option(
"--port",
action="store",
dest="port",
default=8080,
type=int,
help="port - default = 8080" )
#------------------------------------------------------------------- End-of-File
| Improve help docs for app server's command line parser. Dave. | Improve help docs for app server's command line parser. Dave.
| Python | mit | simonsdave/yar,simonsdave/yar,simonsdave/yar,simonsdave/yar |
e9c4881ee29ba104caf9fc8330583c254fe52c06 | scripts/examples/Arduino/Portenta-H7/19-Low-Power/deep_sleep.py | scripts/examples/Arduino/Portenta-H7/19-Low-Power/deep_sleep.py | # Deep Sleep Mode Example
# This example demonstrates the low-power deep sleep mode plus sensor shutdown.
# Note the camera will reset after wake-up from deep sleep. To find out if the cause of reset
# is deep sleep, call the machine.reset_cause() function and test for machine.DEEPSLEEP_RESET
import pyb, machine, sensor
# Create and init RTC object.
rtc = pyb.RTC()
# (year, month, day[, hour[, minute[, second[, microsecond[, tzinfo]]]]])
rtc.datetime((2014, 5, 1, 4, 13, 0, 0, 0))
# Print RTC info.
print(rtc.datetime())
sensor.reset()
# Enable sensor softsleep
sensor.sleep(True)
# Optionally bypass the regulator on OV7725
# for the lowest possible power consumption.
if (sensor.get_id() == sensor.OV7725):
# Bypass internal regulator
sensor.__write_reg(0x4F, 0x18)
# Shutdown the sensor (pulls PWDN high).
sensor.shutdown(True)
# Enable RTC interrupts every 30 seconds.
# Note the camera will RESET after wakeup from Deepsleep Mode.
rtc.wakeup(30000)
# Enter Deepsleep Mode.
machine.deepsleep()
| # Deep Sleep Mode Example
# This example demonstrates the low-power deep sleep mode plus sensor shutdown.
# Note the camera will reset after wake-up from deep sleep. To find out if the cause of reset
# is deep sleep, call the machine.reset_cause() function and test for machine.DEEPSLEEP_RESET
import pyb, machine, sensor
# Create and init RTC object.
rtc = pyb.RTC()
# (year, month, day[, hour[, minute[, second[, microsecond[, tzinfo]]]]])
rtc.datetime((2014, 5, 1, 4, 13, 0, 0, 0))
# Print RTC info.
print(rtc.datetime())
sensor.reset()
# Shutdown the sensor (pulls PWDN high).
sensor.shutdown(True)
# Enable RTC interrupts every 30 seconds.
# Note the camera will RESET after wakeup from Deepsleep Mode.
rtc.wakeup(30000)
# Enter Deepsleep Mode.
machine.deepsleep()
| Remove sensor setting from deep sleep example | Remove sensor setting from deep sleep example
| Python | mit | iabdalkader/openmv,openmv/openmv,kwagyeman/openmv,iabdalkader/openmv,openmv/openmv,kwagyeman/openmv,openmv/openmv,iabdalkader/openmv,kwagyeman/openmv,iabdalkader/openmv,kwagyeman/openmv,openmv/openmv |
ae8273f86fc3cc7fdacadf495aa148dda796f11b | printcli.py | printcli.py | #!/usr/bin/env python2
import argparse
import os
from labelprinter import Labelprinter
if os.path.isfile('labelprinterServeConf_local.py'):
import labelprinterServeConf_local as conf
else:
import labelprinterServeConf as conf
def text(args, labelprinter):
bold = 'on' if args.bold else 'off'
labelprinter.printText(args.text,
charSize=args.char_size,
font=args.font,
align=args.align,
bold=bold,
charStyle=args.char_style,
cut=args.cut
)
parser = argparse.ArgumentParser(description="A command line interface to Labello.")
subparsers = parser.add_subparsers(help="commands")
parser_text = subparsers.add_parser("text", help="print a text")
parser_text.add_argument("text", type=str, help="the text to print")
parser_text.add_argument("--char_size", type=str, default='42')
parser_text.add_argument("--font", type=str, default='lettergothic')
parser_text.add_argument("--align", type=str, default='left')
parser_text.add_argument("--bold", action='store_true')
parser_text.add_argument("--char_style", type=str, default='normal')
parser_text.add_argument("--cut", type=str, default='full')
parser_text.set_defaults(func=text)
args = parser.parse_args()
labelprinter = Labelprinter(conf=conf)
args.func(args, labelprinter)
| #!/usr/bin/env python2
import argparse
import os
from labelprinter import Labelprinter
import labelprinterServeConf as conf
def text(args, labelprinter):
bold = 'on' if args.bold else 'off'
labelprinter.printText(args.text,
charSize=args.char_size,
font=args.font,
align=args.align,
bold=bold,
charStyle=args.char_style,
cut=args.cut
)
parser = argparse.ArgumentParser(description="A command line interface to Labello.")
subparsers = parser.add_subparsers(help="commands")
parser_text = subparsers.add_parser("text", help="print a text")
parser_text.add_argument("text", type=str, help="the text to print")
parser_text.add_argument("--char_size", type=str, default='42')
parser_text.add_argument("--font", type=str, default='lettergothic')
parser_text.add_argument("--align", type=str, default='left')
parser_text.add_argument("--bold", action='store_true')
parser_text.add_argument("--char_style", type=str, default='normal')
parser_text.add_argument("--cut", type=str, default='full')
parser_text.set_defaults(func=text)
args = parser.parse_args()
labelprinter = Labelprinter(conf=conf)
args.func(args, labelprinter)
| Make the CLI use the new config (see e4054fb). | Make the CLI use the new config (see e4054fb).
| Python | mit | chaosdorf/labello,chaosdorf/labello,chaosdorf/labello |
68e6321113c249508dad89688e58860ef5728d64 | microscopes/lda/runner.py | microscopes/lda/runner.py | """Implements the Runner interface fo LDA
"""
from microscopes.common import validator
from microscopes.common.rng import rng
from microscopes.lda.kernels import lda_crp_gibbs
from microscopes.lda.kernels import lda_sample_dispersion
class runner(object):
"""The LDA runner
Parameters
----------
defn : ``model_definition``: The structural definition.
view : A list of list of serializable objects (the 'documents')
latent : ``state``: The initialization state.
"""
def __init__(self, defn, view, latent, kernel_config='assign'):
self._defn = defn
self._view = view
self._latent = latent
def run(self, r, niters=10000):
"""Run the lda kernel for `niters`, in a single thread.
Parameters
----------
r : random state
niters : int
"""
validator.validate_type(r, rng, param_name='r')
validator.validate_positive(niters, param_name='niters')
for _ in xrange(niters):
lda_crp_gibbs(self._latent, r)
lda_sample_dispersion(self._latent, r)
| """Implements the Runner interface fo LDA
"""
from microscopes.common import validator
from microscopes.common.rng import rng
from microscopes.lda.kernels import lda_crp_gibbs
from microscopes.lda.kernels import sample_gamma, sample_alpha
class runner(object):
"""The LDA runner
Parameters
----------
defn : ``model_definition``: The structural definition.
view : A list of list of serializable objects (the 'documents')
latent : ``state``: The initialization state.
"""
def __init__(self, defn, view, latent, kernel_config='assign'):
self._defn = defn
self._view = view
self._latent = latent
def run(self, r, niters=10000):
"""Run the lda kernel for `niters`, in a single thread.
Parameters
----------
r : random state
niters : int
"""
validator.validate_type(r, rng, param_name='r')
validator.validate_positive(niters, param_name='niters')
for _ in xrange(niters):
lda_crp_gibbs(self._latent, r)
sample_gamma(self._latent, r, 5, 0.1)
sample_alpha(self._latent, r, 5, 0.1)
| Use C++ implementations of hp sampling | Use C++ implementations of hp sampling
| Python | bsd-3-clause | datamicroscopes/lda,datamicroscopes/lda,datamicroscopes/lda |
a32e61e9cdf2eababb568659766688a731b121cb | warlock/__init__.py | warlock/__init__.py | # Copyright 2012 Brian Waldon
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Public-facing Warlock API"""
from warlock.core import model_factory # NOQA
from warlock.exceptions import InvalidOperation # NOQA
| # Copyright 2012 Brian Waldon
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Public-facing Warlock API"""
from warlock.core import model_factory # noqa: F401
from warlock.exceptions import InvalidOperation # noqa: F401
| Apply 'no blanket NOQA statements' fixes enforced by pre-commit hook | Apply 'no blanket NOQA statements' fixes enforced by pre-commit hook
| Python | apache-2.0 | bcwaldon/warlock |
3dae7f461d34efceb2e8b0194306d85236fea1fc | src/main/python/piglatin.py | src/main/python/piglatin.py | import sys
def parseCommandLine(argv):
print 'Inside parser'
return argv[1] if len(argv) > 1 else ""
if __name__ == "__main__":
latin = parseCommandLine(sys.argv)
print(latin)
print("igpay atinlay")
| import sys
def parseCommandLine(argv):
return argv[1] if len(argv) > 1 else ""
if __name__ == "__main__":
latin = parseCommandLine(sys.argv)
print(latin)
print("igpay atinlay")
| Test case failing for python3 removed | Test case failing for python3 removed
| Python | mit | oneyoke/sw_asgmt_2 |
cdbe3f5ed5e65a14c1f40cc5daa84a9103e4322d | tests/test_boto_store.py | tests/test_boto_store.py | #!/usr/bin/env python
import os
from tempdir import TempDir
import pytest
boto = pytest.importorskip('boto')
from simplekv.net.botostore import BotoStore
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket
@pytest.fixture(params=boto_credentials,
ids=[c['access_key'] for c in boto_credentials])
def credentials(request):
return request.param
@pytest.yield_fixture()
def bucket(credentials):
with boto_bucket(**credentials) as bucket:
yield bucket
class TestBotoStorage(BasicStore, UrlStore):
@pytest.fixture(params=['', '/test-prefix'])
def prefix(self, request):
return request.param
@pytest.fixture
def store(self, bucket, prefix):
return BotoStore(bucket, prefix)
def test_get_filename_nonexistant(self, store):
# NOTE: boto misbehaves here and tries to erase the target file
# the parent tests use /dev/null, which you really should not try
# to os.remove!
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file('nonexistantkey', os.path.join(tmpdir, 'a'))
| #!/usr/bin/env python
import os
from tempdir import TempDir
import pytest
boto = pytest.importorskip('boto')
from simplekv.net.botostore import BotoStore
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket
@pytest.fixture(params=boto_credentials,
ids=[c['access_key'] for c in boto_credentials])
def credentials(request):
return request.param
@pytest.yield_fixture()
def bucket(credentials):
with boto_bucket(**credentials) as bucket:
yield bucket
class TestBotoStorage(BasicStore, UrlStore):
@pytest.fixture(params=['', '/test-prefix'])
def prefix(self, request):
return request.param
@pytest.fixture
def store(self, bucket, prefix):
return BotoStore(bucket, prefix)
def test_get_filename_nonexistant(self, store, key):
# NOTE: boto misbehaves here and tries to erase the target file
# the parent tests use /dev/null, which you really should not try
# to os.remove!
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
| Use key fixture in boto tests. | Use key fixture in boto tests.
| Python | mit | fmarczin/simplekv,fmarczin/simplekv,karteek/simplekv,mbr/simplekv,karteek/simplekv,mbr/simplekv |
d65f39d85e98be8651863bcf617fb218e266d0bb | mpfmc/uix/relative_animation.py | mpfmc/uix/relative_animation.py | from kivy.animation import Animation
class RelativeAnimation(Animation):
"""Class that extends the Kivy Animation base class to add relative animation
property target values that are calculated when the animation starts."""
def _initialize(self, widget):
"""Initializes the animation and calculates the property target value
based on the current value plus the desired delta.
Notes: Do not call the base class _initialize method as this override
completely replaces the base class method."""
d = self._widgets[widget.uid] = {
'widget': widget,
'properties': {},
'time': None}
# get current values and calculate target values
p = d['properties']
for key, value in self._animated_properties.items():
original_value = getattr(widget, key)
if isinstance(original_value, (tuple, list)):
original_value = original_value[:]
target_value = map(lambda x, y: x + y, original_value, value)
elif isinstance(original_value, dict):
original_value = original_value.copy()
target_value = value
else:
target_value = original_value + value
p[key] = (original_value, target_value)
# install clock
self._clock_install()
| from kivy.animation import Animation
class RelativeAnimation(Animation):
"""Class that extends the Kivy Animation base class to add relative animation
property target values that are calculated when the animation starts."""
def _initialize(self, widget):
"""Initializes the animation and calculates the property target value
based on the current value plus the desired delta.
Notes: Do not call the base class _initialize method as this override
completely replaces the base class method."""
d = self._widgets[widget.uid] = {
'widget': widget,
'properties': {},
'time': None}
# get current values and calculate target values
p = d['properties']
for key, value in self._animated_properties.items():
original_value = getattr(widget, key)
if isinstance(original_value, (tuple, list)):
original_value = original_value[:]
target_value = [x + y for x, y in zip(original_value, value)]
elif isinstance(original_value, dict):
original_value = original_value.copy()
target_value = value
else:
target_value = original_value + value
p[key] = (original_value, target_value)
# install clock
self._clock_install()
| Fix relative animation of list values | Fix relative animation of list values
| Python | mit | missionpinball/mpf-mc,missionpinball/mpf-mc,missionpinball/mpf-mc |
12585ce38fc3ec7a0ddcf448cc398f694c7e29fb | dakis/api/views.py | dakis/api/views.py | from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
| from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author', 'details')
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
| Exclude details field from editable through API | Exclude details field from editable through API
| Python | agpl-3.0 | niekas/dakis,niekas/dakis,niekas/dakis |
cfb50f4ff62770c397634897e09497b74b396067 | notifications/level_starting.py | notifications/level_starting.py | from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class CompLevelStartingNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.LEVEL_STARTING]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['comp_level'] = self.match.comp_level
data['message_data']['scheduled_time'] = self.match.time
return data
| from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class CompLevelStartingNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.LEVEL_STARTING]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['event_key'] = self.event.key_name
data['message_data']['comp_level'] = self.match.comp_level
data['message_data']['scheduled_time'] = self.match.time
return data
| Add event key to comp level starting notification | Add event key to comp level starting notification
| Python | mit | josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,bvisness/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,josephbisch/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,1fish2/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,1fish2/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,bvisness/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,tsteward/the-blue-alliance,1fish2/the-blue-alliance,1fish2/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,bvisness/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,fangeugene/the-blue-alliance |
0177066012b3373753cba8baf86f00a365d7147b | findaconf/tests/config.py | findaconf/tests/config.py | # coding: utf-8
from decouple import config
from findaconf.tests.fake_data import fake_conference, seed
def set_app(app, db=False):
unset_app(db)
app.config['TESTING'] = True
app.config['WTF_CSRF_ENABLED'] = False
if db:
app.config['SQLALCHEMY_DATABASE_URI'] = config(
'DATABASE_URL_TEST',
default='sqlite:///' + app.config['BASEDIR'].child('findaconf',
'tests',
'tests.db')
)
test_app = app.test_client()
if db:
db.create_all()
seed(app, db)
[db.session.add(fake_conference(db)) for i in range(1, 43)]
db.session.commit()
return test_app
def unset_app(db=False):
if db:
db.session.remove()
db.drop_all()
| # coding: utf-8
from decouple import config
from findaconf.tests.fake_data import fake_conference, seed
def set_app(app, db=False):
# set test vars
app.config['TESTING'] = True
app.config['WTF_CSRF_ENABLED'] = False
# set test db
if db:
app.config['SQLALCHEMY_DATABASE_URI'] = config(
'DATABASE_URL_TEST',
default='sqlite:///' + app.config['BASEDIR'].child('findaconf',
'tests',
'tests.db')
)
# create test app
test_app = app.test_client()
# create and feed db tables
if db:
# start from a clean db
db.session.remove()
db.drop_all()
# create tables and feed them
db.create_all()
seed(app, db)
[db.session.add(fake_conference(db)) for i in range(1, 43)]
db.session.commit()
# return test app
return test_app
def unset_app(db=False):
if db:
db.session.remove()
db.drop_all()
| Fix bug that used dev db instead of test db | Fix bug that used dev db instead of test db
| Python | mit | cuducos/findaconf,koorukuroo/findaconf,cuducos/findaconf,koorukuroo/findaconf,koorukuroo/findaconf,cuducos/findaconf |
592df76f77c3450ba56b249ab0cd4404c8dd99e2 | bundle_graph.py | bundle_graph.py | #!/usr/bin/python3
from random import randint
class Student:
def __init__(self, id):
self.id = id
self.papers = []
def assign_paper(self, paper):
self.papers.append(paper)
def __str__(self):
return str(self.id) + ": " + str(self.papers)
class Paper:
def __init__(self, id):
self.id = id
def create_bundle_graph(n, k):
students = [Student(x + 1) for x in range(n)]
papers = [Paper(x + 1) for x in range(n)]
while True:
for i in range(k):
inavai_pap = set()
for j in range(len(students)):
paper = None
while True:
paper = papers[randint(0, len(papers) - 1)]
if paper.id == students[j].id:
continue
if paper.id not in inavai_pap and paper.id not in students[j].papers:
inavai_pap.add(paper.id)
break
students[j].assign_paper(paper.id)
# make sure not more than one paper is assigned to every two people
success = True
for i in range(n):
for j in range(i + 1, n):
cnt = 0
for l in range(k):
if students[i].papers[l] == students[j].papers[l]:
cnt = cnt + 1
if cnt >= 2:
success = False
break
if not success:
break
if not success:
break
if success:
break
return students
| #!/usr/bin/python3
from random import randint
class Student:
def __init__(self, id):
self.id = id
self.papers = []
def assign_paper(self, paper):
self.papers.append(paper)
def __str__(self):
return str(self.id) + ": " + str(self.papers)
class Paper:
def __init__(self, id):
self.id = id
def create_bundle_graph(n, k):
students = [Student(x + 1) for x in range(n)]
papers = [Paper(x + 1) for x in range(n)]
while True:
for i in range(k):
inavai_pap = set()
for j in range(len(students)):
paper = None
while True:
paper = papers[randint(0, len(papers) - 1)]
if paper.id == students[j].id:
continue
if paper.id not in inavai_pap and paper.id not in students[j].papers:
inavai_pap.add(paper.id)
break
students[j].assign_paper(paper.id)
# make sure not more than one paper is assigned to every two people
success = True
for i in range(n):
for j in range(i + 1, n):
cnt = len(set(students[i].papers).intersection(set(students[j].papers)))
if cnt >= 2:
success = False
break
if not success:
break
if success:
break
return students
| Update validation check for paper bundles. | Update validation check for paper bundles.
| Python | mit | haoyueping/peer-grading-for-MOOCs |
86cfa4e3b60962fa04185efb6500fa6a6c0f4880 | infosystem/subsystem/route/resource.py | infosystem/subsystem/route/resource.py | from sqlalchemy import UniqueConstraint
from infosystem.common.subsystem import entity
from infosystem.database import db
class Route(entity.Entity, db.Model):
# TODO(samueldmq): recheck string lengths for below attributes
# TODO(samueldmq): add an 'active' attribute
attributes = ['id', 'name', 'url', 'method', 'admin', 'bypass']
name = db.Column(db.String(20), nullable=False)
url = db.Column(db.String(80), nullable=False)
method = db.Column(db.String(10), nullable=False)
admin = db.Column(db.Boolean(), nullable=False)
bypass = db.Column(db.Boolean(), nullable=False)
__table_args__ = (UniqueConstraint('url', 'method', name='route_uk'),)
def __init__(self, id, name, url, method, admin=False, bypass=False):
self.id = id
self.name = name
self.url = url
self.method = method
self.method = method
self.bypass = bypass
self.admin = admin
| from sqlalchemy import UniqueConstraint
from infosystem.common.subsystem import entity
from infosystem.database import db
class Route(entity.Entity, db.Model):
# TODO(samueldmq): recheck string lengths for below attributes
# TODO(samueldmq): add an 'active' attribute
attributes = ['id', 'name', 'url', 'method', 'sysadmin', 'bypass']
name = db.Column(db.String(20), nullable=False)
url = db.Column(db.String(80), nullable=False)
method = db.Column(db.String(10), nullable=False)
sysadmin = db.Column(db.Boolean(), nullable=False)
bypass = db.Column(db.Boolean(), nullable=False)
__table_args__ = (UniqueConstraint('url', 'method', name='route_uk'),)
def __init__(self, id, name, url, method, sysadmin=False, bypass=False):
self.id = id
self.name = name
self.url = url
self.method = method
self.method = method
self.bypass = bypass
self.sysadmin = sysadmin
| Change admin attribute to sysadmin | Change admin attribute to sysadmin
| Python | apache-2.0 | samueldmq/infosystem |
4ec8f09736113630208325cb08f1ffe07e8acc26 | go_cli/tests/test_main.py | go_cli/tests/test_main.py | """ Tests fir go_cli.main. """
from unittest import TestCase
from click.testing import CliRunner
from go_cli.main import cli
class TestCli(TestCase):
def test_help(self):
runner = CliRunner()
result = runner.invoke(cli, ['--help'])
self.assertEqual(result.exit_code, 0)
self.assertTrue("Vumi Go command line utility." in result.output)
self.assertTrue("send Send messages via an HTTP API (nostream)..."
in result.output)
def test_version(self):
runner = CliRunner()
result = runner.invoke(cli, ['--version'])
self.assertEqual(result.exit_code, 0)
self.assertTrue("go_cli, version " in result.output)
| """ Tests for go_cli.main. """
from unittest import TestCase
from click.testing import CliRunner
from go_cli.main import cli
class TestCli(TestCase):
def test_help(self):
runner = CliRunner()
result = runner.invoke(cli, ['--help'])
self.assertEqual(result.exit_code, 0)
self.assertTrue("Vumi Go command line utility." in result.output)
self.assertTrue("send Send messages via an HTTP API (nostream)..."
in result.output)
def test_version(self):
runner = CliRunner()
result = runner.invoke(cli, ['--version'])
self.assertEqual(result.exit_code, 0)
self.assertTrue("go_cli, version " in result.output)
| Fix typo in module docstring. | Fix typo in module docstring.
| Python | bsd-3-clause | praekelt/go-cli,praekelt/go-cli |